diff --git a/README.md b/README.md
index 545862e..da980c6 100644
--- a/README.md
+++ b/README.md
@@ -130,6 +130,46 @@ Again set the `component`, `stack`, `planPath`, and `action` in the same manner
       cosmosEndpoint: "https://my-cosmo-account.documents.azure.com:443/"
 ```
 
+## Google Cloud
+
+This action supports Google Cloud Platform (GCP). In GCP, we store Terraform plan files in Google Cloud Storage (GCS) and plan metadata in Firestore.
+
+To use the GCP implementation, specify `planRepositoryType` as `gcs` and `metadataRepositoryType` as `firestore`, then provide the following GCP-specific settings: `gcpProjectId` to specify the project for both the GCS bucket and Firestore, `bucketName` for GCS storage, and `gcpFirestoreDatabaseName`/`gcpFirestoreCollectionName` for the Firestore metadata.
+
+The `component`, `stack`, `planPath`, and `action` parameters work the same way as in the AWS and Azure examples.
+
+```yaml
+  - name: Store Plan
+    uses: cloudposse/github-action-terraform-plan-storage@v2
+    id: store-plan
+    with:
+      action: storePlan
+      planPath: my-plan.tfplan
+      component: mycomponent
+      stack: core-mycomponent-use1
+      planRepositoryType: gcs
+      metadataRepositoryType: firestore
+      bucketName: my-terraform-plans
+      gcpProjectId: my-gcp-project
+      gcpFirestoreDatabaseName: terraform-plan-metadata
+      gcpFirestoreCollectionName: terraform-plan-storage
+
+  - name: Get Plan
+    uses: cloudposse/github-action-terraform-plan-storage@v2
+    id: get-plan
+    with:
+      action: getPlan
+      planPath: my-plan.tfplan
+      component: mycomponent
+      stack: core-mycomponent-use1
+      planRepositoryType: gcs
+      metadataRepositoryType: firestore
+      bucketName: my-terraform-plans
+      gcpProjectId: my-gcp-project
+      gcpFirestoreDatabaseName: terraform-plan-metadata
+      gcpFirestoreCollectionName: terraform-plan-storage
+```
+
 > [!IMPORTANT]
 > In Cloud Posse's examples, we avoid pinning modules to specific versions to prevent discrepancies between the documentation
 > and the latest released versions. However, for your own projects, we strongly advise pinning each module to the exact version
@@ -152,7 +192,7 @@ Again set the `component`, `stack`, `planPath`, and `action` in the same manner
 | action | which action to perform. Valid values are: 'storePlan', 'getPlan', 'taintPlan' | storePlan | true |
 | blobAccountName | the name of the Azure Blob Storage account to store the plan file | N/A | false |
 | blobContainerName | the name of the Azure Blob Storage container to store the plan file | N/A | false |
-| bucketName | the name of the S3 bucket to store the plan file | terraform-plan-storage | false |
+| bucketName | the name of the S3 or GCS bucket to store the plan file | terraform-plan-storage | false |
 | commitSHA | Commit SHA to use for fetching plan | | false |
 | component | the name of the component corresponding to the plan file | N/A | false |
 | cosmosConnectionString | the connection string to the CosmosDB account to store the metadata | N/A | false |
@@ -160,9 +200,12 @@ Again set the `component`, `stack`, `planPath`, and `action` in the same manner
 | cosmosDatabaseName | the name of the CosmosDB database to store the metadata | N/A | false |
 | cosmosEndpoint | the endpoint of the CosmosDB account to store the metadata | N/A | false |
 | failOnMissingPlan | Fail if plan is missing | true | false |
-| metadataRepositoryType | the type of repository where the plan file is stored. Valid values are: 'dynamo', 'cosmodb' | dynamo | false |
+| gcpFirestoreCollectionName | the name of the Firestore collection to store the metadata | terraform-plan-storage | false |
+| gcpFirestoreDatabaseName | the name of the Firestore database to store the metadata | (default) | false |
+| gcpProjectId | the Google Cloud project ID for GCP services (GCS, Firestore) | N/A | false |
+| metadataRepositoryType | the type of repository where the plan file is stored. Valid values are: 'dynamo', 'cosmodb', 'firestore' | dynamo | false |
 | planPath | path to the Terraform plan file. Required for 'storePlan' and 'getPlan' actions | N/A | false |
-| planRepositoryType | the type of repository where the metadata is stored. Valid values are: 's3', 'azureblob' | s3 | false |
+| planRepositoryType | the type of repository where the metadata is stored. Valid values are: 's3', 'azureblob', 'gcs' | s3 | false |
 | stack | the name of the stack corresponding to the plan file | N/A | false |
 | tableName | the name of the dynamodb table to store metadata | terraform-plan-storage | false |
 
diff --git a/README.yaml b/README.yaml
index 20ff3c6..9e85153 100644
--- a/README.yaml
+++ b/README.yaml
@@ -119,6 +119,46 @@ usage: |-
         cosmosEndpoint: "https://my-cosmo-account.documents.azure.com:443/"
   ```
 
+  ## Google Cloud
+
+  This action supports Google Cloud Platform (GCP). In GCP, we store Terraform plan files in Google Cloud Storage (GCS) and plan metadata in Firestore.
+
+  To use the GCP implementation, specify `planRepositoryType` as `gcs` and `metadataRepositoryType` as `firestore`, then provide the following GCP-specific settings: `gcpProjectId` to specify the project for both the GCS bucket and Firestore, `bucketName` for GCS storage, and `gcpFirestoreDatabaseName`/`gcpFirestoreCollectionName` for the Firestore metadata.
+
+  The `component`, `stack`, `planPath`, and `action` parameters work the same way as in the AWS and Azure examples.
+
+  ```yaml
+    - name: Store Plan
+      uses: cloudposse/github-action-terraform-plan-storage@v2
+      id: store-plan
+      with:
+        action: storePlan
+        planPath: my-plan.tfplan
+        component: mycomponent
+        stack: core-mycomponent-use1
+        planRepositoryType: gcs
+        metadataRepositoryType: firestore
+        bucketName: my-terraform-plans
+        gcpProjectId: my-gcp-project
+        gcpFirestoreDatabaseName: terraform-plan-metadata
+        gcpFirestoreCollectionName: terraform-plan-storage
+
+    - name: Get Plan
+      uses: cloudposse/github-action-terraform-plan-storage@v2
+      id: get-plan
+      with:
+        action: getPlan
+        planPath: my-plan.tfplan
+        component: mycomponent
+        stack: core-mycomponent-use1
+        planRepositoryType: gcs
+        metadataRepositoryType: firestore
+        bucketName: my-terraform-plans
+        gcpProjectId: my-gcp-project
+        gcpFirestoreDatabaseName: terraform-plan-metadata
+        gcpFirestoreCollectionName: terraform-plan-storage
+  ```
+
 # Other files to include in this README from the project folder
 include:
   - "docs/github-action.md"
diff --git a/action.yml b/action.yml
index bc0b866..00acac4 100644
--- a/action.yml
+++ b/action.yml
@@ -16,7 +16,7 @@ inputs:
     description: "the name of the Azure Blob Storage container to store the plan file"
     required: false
   bucketName:
-    description: "the name of the S3 bucket to store the plan file"
+    description: "the name of the S3 or GCS bucket to store the plan file"
     required: false
     default: "terraform-plan-storage"
   commitSHA:
@@ -43,13 +43,13 @@ inputs:
     required: false
     default: "true"
   metadataRepositoryType:
-    description: "the type of repository where the plan file is stored. 
Valid values are: 'dynamo', 'cosmodb'" + description: "the type of repository where the plan file is stored. Valid values are: 'dynamo', 'cosmodb', 'firestore'" required: false default: "dynamo" planPath: description: "path to the Terraform plan file. Required for 'storePlan' and 'getPlan' actions" planRepositoryType: - description: "the type of repository where the metadata is stored. Valid values are: 's3', 'azureblob'" + description: "the type of repository where the metadata is stored. Valid values are: 's3', 'azureblob', 'gcs'" required: false default: "s3" stack: @@ -59,6 +59,17 @@ inputs: description: "the name of the dynamodb table to store metadata" required: false default: "terraform-plan-storage" + gcpProjectId: + description: "the Google Cloud project ID for GCP services (GCS, Firestore)" + required: false + gcpFirestoreDatabaseName: + description: "the name of the Firestore database to store the metadata" + required: false + default: "(default)" + gcpFirestoreCollectionName: + description: "the name of the Firestore collection to store the metadata" + required: false + default: "terraform-plan-storage" outputs: {} runs: diff --git a/dist/index.js b/dist/index.js index 5d1b64c..6b29622 100644 --- a/dist/index.js +++ b/dist/index.js @@ -8,7 +8,11 @@ require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -21,7 +25,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -83,13 +87,13 @@ class Command { } } function escapeData(s) { - return utils_1.toCommandValue(s) + return (0, utils_1.toCommandValue)(s) .replace(/%/g, '%25') .replace(/\r/g, '%0D') .replace(/\n/g, '%0A'); } function escapeProperty(s) { - return utils_1.toCommandValue(s) + return (0, utils_1.toCommandValue)(s) .replace(/%/g, '%25') .replace(/\r/g, '%0D') .replace(/\n/g, '%0A') @@ -107,7 +111,11 @@ function escapeProperty(s) { var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -120,7 +128,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -134,7 +142,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +exports.platform = exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = exports.markdownSummary = exports.summary = exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = __nccwpck_require__(87351); const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); @@ -154,7 +162,7 @@ var ExitCode; * A code indicating that the action was a failure */ ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +})(ExitCode || (exports.ExitCode = ExitCode = {})); //----------------------------------------------------------------------- // Variables //----------------------------------------------------------------------- @@ -165,13 +173,13 @@ var ExitCode; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function exportVariable(name, val) { - const convertedVal = utils_1.toCommandValue(val); + const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { - return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); + return (0, file_command_1.issueFileCommand)('ENV', (0, file_command_1.prepareKeyValueMessage)(name, val)); } - command_1.issueCommand('set-env', { name }, convertedVal); + (0, command_1.issueCommand)('set-env', { name }, convertedVal); } exports.exportVariable = exportVariable; /** @@ -179,7 +187,7 @@ exports.exportVariable = exportVariable; * @param secret value of the secret */ function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); + (0, command_1.issueCommand)('add-mask', {}, secret); } exports.setSecret = setSecret; /** @@ -189,10 +197,10 @@ exports.setSecret = setSecret; function addPath(inputPath) { const filePath = process.env['GITHUB_PATH'] || ''; if (filePath) { - file_command_1.issueFileCommand('PATH', inputPath); + (0, file_command_1.issueFileCommand)('PATH', inputPath); } else { - command_1.issueCommand('add-path', {}, inputPath); + (0, 
command_1.issueCommand)('add-path', {}, inputPath); } process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; } @@ -267,10 +275,10 @@ exports.getBooleanInput = getBooleanInput; function setOutput(name, value) { const filePath = process.env['GITHUB_OUTPUT'] || ''; if (filePath) { - return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + return (0, file_command_1.issueFileCommand)('OUTPUT', (0, file_command_1.prepareKeyValueMessage)(name, value)); } process.stdout.write(os.EOL); - command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); + (0, command_1.issueCommand)('set-output', { name }, (0, utils_1.toCommandValue)(value)); } exports.setOutput = setOutput; /** @@ -279,7 +287,7 @@ exports.setOutput = setOutput; * */ function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 'on' : 'off'); + (0, command_1.issue)('echo', enabled ? 'on' : 'off'); } exports.setCommandEcho = setCommandEcho; //----------------------------------------------------------------------- @@ -310,7 +318,7 @@ exports.isDebug = isDebug; * @param message debug message */ function debug(message) { - command_1.issueCommand('debug', {}, message); + (0, command_1.issueCommand)('debug', {}, message); } exports.debug = debug; /** @@ -319,7 +327,7 @@ exports.debug = debug; * @param properties optional properties to add to the annotation. */ function error(message, properties = {}) { - command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); + (0, command_1.issueCommand)('error', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports.error = error; /** @@ -328,7 +336,7 @@ exports.error = error; * @param properties optional properties to add to the annotation. */ function warning(message, properties = {}) { - command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); + (0, command_1.issueCommand)('warning', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports.warning = warning; /** @@ -337,7 +345,7 @@ exports.warning = warning; * @param properties optional properties to add to the annotation. */ function notice(message, properties = {}) { - command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); + (0, command_1.issueCommand)('notice', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports.notice = notice; /** @@ -356,14 +364,14 @@ exports.info = info; * @param name The name of the output group */ function startGroup(name) { - command_1.issue('group', name); + (0, command_1.issue)('group', name); } exports.startGroup = startGroup; /** * End an output group. 
*/ function endGroup() { - command_1.issue('endgroup'); + (0, command_1.issue)('endgroup'); } exports.endGroup = endGroup; /** @@ -401,9 +409,9 @@ exports.group = group; function saveState(name, value) { const filePath = process.env['GITHUB_STATE'] || ''; if (filePath) { - return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + return (0, file_command_1.issueFileCommand)('STATE', (0, file_command_1.prepareKeyValueMessage)(name, value)); } - command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); + (0, command_1.issueCommand)('save-state', { name }, (0, utils_1.toCommandValue)(value)); } exports.saveState = saveState; /** @@ -439,6 +447,10 @@ var path_utils_1 = __nccwpck_require__(2981); Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); +/** + * Platform utilities exports + */ +exports.platform = __importStar(__nccwpck_require__(85243)); //# sourceMappingURL=core.js.map /***/ }), @@ -451,7 +463,11 @@ Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: funct // For internal use, subject to change. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -464,7 +480,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -472,9 +488,9 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ +const crypto = __importStar(__nccwpck_require__(6113)); const fs = __importStar(__nccwpck_require__(57147)); const os = __importStar(__nccwpck_require__(22037)); -const uuid_1 = __nccwpck_require__(78974); const utils_1 = __nccwpck_require__(5278); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; @@ -484,14 +500,14 @@ function issueFileCommand(command, message) { if (!fs.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + fs.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os.EOL}`, { encoding: 'utf8' }); } exports.issueFileCommand = issueFileCommand; function prepareKeyValueMessage(key, value) { - const delimiter = `ghadelimiter_${uuid_1.v4()}`; - const convertedValue = utils_1.toCommandValue(value); + const delimiter = `ghadelimiter_${crypto.randomUUID()}`; + const convertedValue = (0, utils_1.toCommandValue)(value); // These should realistically never happen, but just in case someone finds a // way to exploit uuid generation let's not allow keys or values that contain // the delimiter. @@ -576,9 +592,9 @@ class OidcClient { const encodedAudience = encodeURIComponent(audience); id_token_url = `${id_token_url}&audience=${encodedAudience}`; } - core_1.debug(`ID token url is ${id_token_url}`); + (0, core_1.debug)(`ID token url is ${id_token_url}`); const id_token = yield OidcClient.getCall(id_token_url); - core_1.setSecret(id_token); + (0, core_1.setSecret)(id_token); return id_token; } catch (error) { @@ -599,7 +615,11 @@ exports.OidcClient = OidcClient; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -612,7 +632,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -657,6 +677,107 @@ exports.toPlatformPath = toPlatformPath; /***/ }), +/***/ 85243: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDetails = exports.isLinux = exports.isMacOS = exports.isWindows = exports.arch = exports.platform = void 0; +const os_1 = __importDefault(__nccwpck_require__(22037)); +const exec = __importStar(__nccwpck_require__(71514)); +const getWindowsInfo = () => __awaiter(void 0, void 0, void 0, function* () { + const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', undefined, { + silent: true + }); + const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', undefined, { + silent: true + }); + return { + name: name.trim(), + version: version.trim() + }; +}); +const getMacOsInfo = () => __awaiter(void 0, void 0, void 0, function* () { + var _a, _b, _c, _d; + const { stdout } = yield exec.getExecOutput('sw_vers', undefined, { + silent: true + }); + const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? 
_b : ''; + const name = (_d = (_c = stdout.match(/ProductName:\s*(.+)/)) === null || _c === void 0 ? void 0 : _c[1]) !== null && _d !== void 0 ? _d : ''; + return { + name, + version + }; +}); +const getLinuxInfo = () => __awaiter(void 0, void 0, void 0, function* () { + const { stdout } = yield exec.getExecOutput('lsb_release', ['-i', '-r', '-s'], { + silent: true + }); + const [name, version] = stdout.trim().split('\n'); + return { + name, + version + }; +}); +exports.platform = os_1.default.platform(); +exports.arch = os_1.default.arch(); +exports.isWindows = exports.platform === 'win32'; +exports.isMacOS = exports.platform === 'darwin'; +exports.isLinux = exports.platform === 'linux'; +function getDetails() { + return __awaiter(this, void 0, void 0, function* () { + return Object.assign(Object.assign({}, (yield (exports.isWindows + ? getWindowsInfo() + : exports.isMacOS + ? getMacOsInfo() + : getLinuxInfo()))), { platform: exports.platform, + arch: exports.arch, + isWindows: exports.isWindows, + isMacOS: exports.isMacOS, + isLinux: exports.isLinux }); + }); +} +exports.getDetails = getDetails; +//# sourceMappingURL=platform.js.map + +/***/ }), + /***/ 81327: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -994,649 +1115,738 @@ exports.toCommandProperties = toCommandProperties; /***/ }), -/***/ 78974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); - -var _v = _interopRequireDefault(__nccwpck_require__(81595)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(26993)); - -var _v3 = _interopRequireDefault(__nccwpck_require__(51472)); - -var _v4 = _interopRequireDefault(__nccwpck_require__(16217)); - -var _nil = _interopRequireDefault(__nccwpck_require__(32381)); - -var _version = _interopRequireDefault(__nccwpck_require__(40427)); - -var _validate = _interopRequireDefault(__nccwpck_require__(92609)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); - -var _parse = _interopRequireDefault(__nccwpck_require__(26385)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -/***/ }), - -/***/ 5842: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('md5').update(bytes).digest(); -} - -var _default = md5; -exports["default"] = _default; - -/***/ }), - -/***/ 32381: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; - -/***/ }), - -/***/ 26385: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(92609)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ - - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ - - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ - - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ - - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) - - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} - -var _default = parse; -exports["default"] = _default; - -/***/ }), - -/***/ 86230: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; - -/***/ }), - -/***/ 9784: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate - -let poolPtr = rnds8Pool.length; - -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); - - poolPtr = 0; - } - - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} - -/***/ }), - -/***/ 38844: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('sha1').update(bytes).digest(); -} - -var _default = sha1; -exports["default"] = _default; - -/***/ }), - -/***/ 61458: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 71514: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ - value: true +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; })); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(92609)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getExecOutput = exports.exec = void 0; +const string_decoder_1 = __nccwpck_require__(71576); +const tr = __importStar(__nccwpck_require__(88159)); /** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. 
Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code */ -const byteToHex = []; - -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); } - -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields - - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } - - return uuid; +exports.exec = exec; +/** + * Exec a command and get the output. + * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ''; + let stderr = ''; + //Using string decoder covers the case where a mult-byte character is split + const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); + const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? 
void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + //flush any remaining characters + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); } - -var _default = stringify; -exports["default"] = _default; +exports.getExecOutput = getExecOutput; +//# sourceMappingURL=exec.js.map /***/ }), -/***/ 81595: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 88159: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ - value: true +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; })); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(9784)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; - -let _clockseq; // Previous uuid creation time - - -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 - - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(__nccwpck_require__(22037)); +const events = __importStar(__nccwpck_require__(82361)); +const child = __importStar(__nccwpck_require__(32081)); +const path = __importStar(__nccwpck_require__(71017)); +const io = __importStar(__nccwpck_require__(47351)); +const ioUtil = __importStar(__nccwpck_require__(81962)); +const timers_1 = __nccwpck_require__(39512); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - - - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested - - - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - - b[i++] = clockseq & 0xff; // `node` - - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf || (0, _stringify.default)(b); -} - -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 26993: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(65920)); - -var _md = _interopRequireDefault(__nccwpck_require__(5842)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; - -/***/ }), - -/***/ 65920: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; - -var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); - -var _parse = _interopRequireDefault(__nccwpck_require__(26385)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - const bytes = []; - - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + return s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + return ''; + } } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } - - return buf; + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; } - - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} - -/***/ }), - -/***/ 51472: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(9784)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function v4(options, buf, offset) { - options = options || {}; - - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - - - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. + // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. 
+ // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + let errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); + } + })); + }); } - - return buf; - } - - return (0, _stringify.default)(rnds); } - -var _default = v4; -exports["default"] = _default; - -/***/ }), - -/***/ 16217: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(65920)); - -var _sha = _interopRequireDefault(__nccwpck_require__(38844)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; - -/***/ }), - -/***/ 92609: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _regex = _interopRequireDefault(__nccwpck_require__(86230)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. 
+ if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; } - -var _default = validate; -exports["default"] = _default; - -/***/ }), - -/***/ 40427: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(92609)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - return parseInt(uuid.substr(14, 1), 16); +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. 
This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } } - -var _default = version; -exports["default"] = _default; +//# sourceMappingURL=toolrunner.js.map /***/ }), @@ -1789,7 +1999,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(6341)); +const httpClient = __importStar(__nccwpck_require__(96255)); const undici_1 = __nccwpck_require__(41773); function getAuthString(token, options) { if (!token && !options.auth) { @@ -1893,7 +2103,95 @@ exports.getOctokitOptions = getOctokitOptions; /***/ }), -/***/ 6341: +/***/ 35526: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { 
+ return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 96255: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -1935,7 +2233,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; const http = __importStar(__nccwpck_require__(13685)); const https = __importStar(__nccwpck_require__(95687)); -const pm = __importStar(__nccwpck_require__(53466)); +const pm = __importStar(__nccwpck_require__(19835)); const tunnel = __importStar(__nccwpck_require__(74294)); const undici_1 = __nccwpck_require__(41773); var HttpCodes; @@ -2556,7 +2854,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa /***/ }), -/***/ 53466: +/***/ 19835: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -2645,11 +2943,30 @@ function isLoopbackAddress(host) { /***/ }), -/***/ 35526: -/***/ (function(__unused_webpack_module, exports) { +/***/ 81962: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -2659,86 +2976,168 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; +var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(__nccwpck_require__(57147)); +const path = __importStar(__nccwpck_require__(71017)); +_a = fs.promises +// export const {open} = 'fs' +, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +// export const {open} = 'fs' +exports.IS_WINDOWS = process.platform === 'win32'; +// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 +exports.UV_FS_O_EXLOCK = 0x10000000; +exports.READONLY = fs.constants.O_RDONLY; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); } - options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; } - options.headers['Authorization'] = `Bearer ${this.token}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). 
+ */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello } + return p.startsWith('/'); } -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); +exports.isRooted = isRooted; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); } - options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); } -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; -//# sourceMappingURL=auth.js.map +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + 
// convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; +//# sourceMappingURL=io-util.js.map /***/ }), -/***/ 96255: +/***/ 47351: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -/* eslint-disable @typescript-eslint/no-explicit-any */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); @@ -2768,663 +3167,275 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(13685)); -const https = __importStar(__nccwpck_require__(95687)); -const pm = __importStar(__nccwpck_require__(19835)); -const tunnel = __importStar(__nccwpck_require__(74294)); -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var 
Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = __nccwpck_require__(39491); +const path = __importStar(__nccwpck_require__(71017)); +const ioUtil = __importStar(__nccwpck_require__(81962)); /** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. */ -function getProxyUrl(serverUrl) { - const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - })); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - const parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive 
!= null) { - this._keepAlive = requestOptions.keepAlive; +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() && copySourceDirectory + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; + else { + yield cpDirRecursive(source, newDest, 0, force); } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); } + yield copyFile(source, newDest, force); } - } - options(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - }); - } - get(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - }); - } - del(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - }); - } - post(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - }); - } - patch(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - }); - } - put(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - }); - } - head(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - }); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream, additionalHeaders); - }); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise - */ - getJson(requestUrl, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - const res = yield this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - postJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - putJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - patchJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - request(verb, requestUrl, data, headers) { - return __awaiter(this, void 0, void 0, function* () { - if (this._disposed) { - throw new Error('Client has already been disposed.'); + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. + */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); } - const parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) - ? 
this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - do { - response = yield this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (const handler of this.handlers) { - if (handler.canHandleAuthentication(response)) { - authenticationHandler = handler; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (response.message.statusCode && - HttpRedirectCodes.includes(response.message.statusCode) && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - const parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol === 'https:' && - parsedUrl.protocol !== parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - yield response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (const header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = yield this.requestRaw(info, data); - redirectsRemaining--; + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); } - if (!response.message.statusCode || - !HttpResponseRetryCodes.includes(response.message.statusCode)) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - yield response.readBody(); - yield this._performExponentialBackoff(numTries); - } - } while (numTries < maxTries); - return response; - }); - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. - * @param info - * @param data - */ - requestRaw(info, data) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - function callbackForResult(err, res) { - if (err) { - reject(err); - } - else if (!res) { - // If `err` is not passed, then `res` must be passed. - reject(new Error('Unknown error')); - } - else { - resolve(res); - } + else { + throw new Error('Destination already exists'); } - this.requestRawWithCallback(info, data, callbackForResult); - }); - }); - } - /** - * Raw request with callback. 
- * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - if (typeof data === 'string') { - if (!info.options.headers) { - info.options.headers = {}; } - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } - let callbackCalled = false; - function handleResult(err, res) { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); } } - const req = info.httpModule.request(info.options, (msg) => { - const res = new HttpClientResponse(msg); - handleResult(undefined, res); - }); - let socket; - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error(`Request timeout: ${info.options.path}`)); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); + try { + // note if path does not exist, error is silent + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 }); - data.pipe(req); } - else { - req.end(); + catch (err) { + throw new Error(`File was unable to be removed ${err}`); } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - const parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. 
+ * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - for (const handler of this.handlers) { - handler.prepareRequest(info.options); + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } } + return result; } - return info; - } - _mergeHeaders(headers) { - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + return ''; + }); +} +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. + * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } } - if (this._keepAlive && !useProxy) { - agent = this._agent; + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; } - // if agent is already assigned use that agent. - if (agent) { - return agent; + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. 
Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } } - // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. - if (proxyUrl && proxyUrl.hostname) { - const agentOptions = { - maxSockets, - keepAlive: this._keepAlive, - proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - })), { host: proxyUrl.hostname, port: proxyUrl.port }) - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); +} +exports.findInPath = findInPath; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null + ? true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); } else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + yield copyFile(srcFile, destFile, force); } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - return __awaiter(this, void 0, void 0, function* () { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - }); - } - _processResponse(res, options) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - const statusCode = res.message.statusCode || 0; - const response = { - statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode === HttpCodes.NotFound) { - resolve(response); - } - // get the result from the body - function dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - const a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - let obj; - let contents; - try { - contents = yield res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = `Failed request: (${statusCode})`; - } - const err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - })); - }); - } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); } -exports.HttpClient = HttpClient; -const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 19835: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkBypass = exports.getProxyUrl = void 0; -function getProxyUrl(reqUrl) { - const usingSsl = reqUrl.protocol === 'https:'; - if (checkBypass(reqUrl)) { - return undefined; - } - const proxyVar = (() => { - if (usingSsl) { - return process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - return process.env['http_proxy'] || process.env['HTTP_PROXY']; +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null); } - })(); - if (proxyVar) { - return new URL(proxyVar); - } - else { - return undefined; - } -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - const reqHost = reqUrl.hostname; - if (isLoopbackAddress(reqHost)) { - return true; - } - const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - const upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (const upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperNoProxyItem === '*' || - upperReqHosts.some(x => x === upperNoProxyItem || - x.endsWith(`.${upperNoProxyItem}`) || - (upperNoProxyItem.startsWith('.') && - x.endsWith(`${upperNoProxyItem}`)))) { - return true; + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); } - } - return false; -} -exports.checkBypass = checkBypass; -function isLoopbackAddress(host) { - const hostLower = host.toLowerCase(); - return (hostLower === 'localhost' || - hostLower.startsWith('127.') || - hostLower.startsWith('[::1]') || - hostLower.startsWith('[0:0:0:0:0:0:0:1]')); + }); } -//# sourceMappingURL=proxy.js.map +//# sourceMappingURL=io.js.map /***/ }), @@ -43506,6 +43517,192 @@ exports.AbortSignal = AbortSignal; //# sourceMappingURL=index.js.map +/***/ }), + +/***/ 39645: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var coreUtil = __nccwpck_require__(51333); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +class AzureKeyCredential { + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +class AzureNamedKeyCredential { + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. 
+ */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +function isNamedKeyCredential(credential) { + return (coreUtil.isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} + +// Copyright (c) Microsoft Corporation. +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +class AzureSASCredential { + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +function isSASCredential(credential) { + return (coreUtil.isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} + +exports.AzureKeyCredential = AzureKeyCredential; +exports.AzureNamedKeyCredential = AzureNamedKeyCredential; +exports.AzureSASCredential = AzureSASCredential; +exports.isNamedKeyCredential = isNamedKeyCredential; +exports.isSASCredential = isSASCredential; +exports.isTokenCredential = isTokenCredential; +//# sourceMappingURL=index.js.map + + /***/ }), /***/ 29729: @@ -45684,7 +45881,7 @@ exports.serializationPolicyName = serializationPolicyName; /***/ }), -/***/ 24607: +/***/ 88121: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -45692,27 +45889,21 @@ exports.serializationPolicyName = serializationPolicyName; Object.defineProperty(exports, "__esModule", ({ value: true })); -var uuid = __nccwpck_require__(43415); -var util = __nccwpck_require__(73837); -var tslib = __nccwpck_require__(82107); -var xml2js = __nccwpck_require__(66189); -var coreUtil = __nccwpck_require__(24918); var logger$1 = __nccwpck_require__(3233); -var coreAuth = __nccwpck_require__(9067); +var coreUtil = __nccwpck_require__(51333); var os = __nccwpck_require__(22037); -var http = __nccwpck_require__(13685); -var https = __nccwpck_require__(95687); var abortController = __nccwpck_require__(52557); -var tunnel = __nccwpck_require__(74294); +var httpsProxyAgent = __nccwpck_require__(77219); +var httpProxyAgent = __nccwpck_require__(23764); +var coreTracing = __nccwpck_require__(19363); +var util = __nccwpck_require__(73837); +var tslib = __nccwpck_require__(4351); var stream = __nccwpck_require__(12781); -var FormData = __nccwpck_require__(64334); -var node_fetch = __nccwpck_require__(80467); -var coreTracing = __nccwpck_require__(31754); - -function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } +var http = __nccwpck_require__(13685); +var https = __nccwpck_require__(95687); +var zlib = __nccwpck_require__(59796); -function _interopNamespace(e) { - if (e && e.__esModule) return e; +function _interopNamespaceDefault(e) { var n = Object.create(null); if (e) { Object.keys(e).forEach(function (k) { @@ -45725,2530 +45916,1773 @@ function _interopNamespace(e) { } }); } - n["default"] = e; + n.default = e; return Object.freeze(n); } -var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); -var os__namespace = /*#__PURE__*/_interopNamespace(os); -var http__namespace = /*#__PURE__*/_interopNamespace(http); -var https__namespace = /*#__PURE__*/_interopNamespace(https); -var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); -var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); -var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); +var os__namespace = /*#__PURE__*/_interopNamespaceDefault(os); +var http__namespace = /*#__PURE__*/_interopNamespaceDefault(http); +var https__namespace = /*#__PURE__*/_interopNamespaceDefault(https); +var zlib__namespace = /*#__PURE__*/_interopNamespaceDefault(zlib); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]); /** - * A collection of HttpHeaders that can be sent with a HTTP request. + * A private implementation of Pipeline. + * Do not export this class from the package. 
+ * @internal */ -function getHeaderKey(headerName) { - return headerName.toLowerCase(); -} -function isHttpHeadersLike(object) { - if (object && typeof object === "object") { - const castObject = object; - if (typeof castObject.rawHeaders === "function" && - typeof castObject.clone === "function" && - typeof castObject.get === "function" && - typeof castObject.set === "function" && - typeof castObject.contains === "function" && - typeof castObject.remove === "function" && - typeof castObject.headersArray === "function" && - typeof castObject.headerValues === "function" && - typeof castObject.headerNames === "function" && - typeof castObject.toJson === "function") { - return true; - } +class HttpPipeline { + constructor(policies) { + var _a; + this._policies = []; + this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? _a : []; + this._orderedPolicies = undefined; } - return false; -} -/** - * A collection of HTTP header key/value pairs. - */ -class HttpHeaders { - constructor(rawHeaders) { - this._headersMap = {}; - if (rawHeaders) { - for (const headerName in rawHeaders) { - this.set(headerName, rawHeaders[headerName]); - } + addPolicy(policy, options = {}) { + if (options.phase && options.afterPhase) { + throw new Error("Policies inside a phase cannot specify afterPhase."); + } + if (options.phase && !ValidPhaseNames.has(options.phase)) { + throw new Error(`Invalid phase name: ${options.phase}`); } + if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { + throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + } + this._policies.push({ + policy, + options, + }); + this._orderedPolicies = undefined; } - /** - * Set a header in this collection with the provided name and value. The name is - * case-insensitive. - * @param headerName - The name of the header to set. This value is case-insensitive. - * @param headerValue - The value of the header to set. - */ - set(headerName, headerValue) { - this._headersMap[getHeaderKey(headerName)] = { - name: headerName, - value: headerValue.toString(), - }; + removePolicy(options) { + const removedPolicies = []; + this._policies = this._policies.filter((policyDescriptor) => { + if ((options.name && policyDescriptor.policy.name === options.name) || + (options.phase && policyDescriptor.options.phase === options.phase)) { + removedPolicies.push(policyDescriptor.policy); + return false; + } + else { + return true; + } + }); + this._orderedPolicies = undefined; + return removedPolicies; } - /** - * Get the header value for the provided header name, or undefined if no header exists in this - * collection with the provided name. - * @param headerName - The name of the header. - */ - get(headerName) { - const header = this._headersMap[getHeaderKey(headerName)]; - return !header ? undefined : header.value; + sendRequest(httpClient, request) { + const policies = this.getOrderedPolicies(); + const pipeline = policies.reduceRight((next, policy) => { + return (req) => { + return policy.sendRequest(req, next); + }; + }, (req) => httpClient.sendRequest(req)); + return pipeline(request); } - /** - * Get whether or not this header collection contains a header entry for the provided header name. 
- */ - contains(headerName) { - return !!this._headersMap[getHeaderKey(headerName)]; + getOrderedPolicies() { + if (!this._orderedPolicies) { + this._orderedPolicies = this.orderPolicies(); + } + return this._orderedPolicies; } - /** - * Remove the header with the provided headerName. Return whether or not the header existed and - * was removed. - * @param headerName - The name of the header to remove. - */ - remove(headerName) { - const result = this.contains(headerName); - delete this._headersMap[getHeaderKey(headerName)]; - return result; + clone() { + return new HttpPipeline(this._policies); } - /** - * Get the headers that are contained this collection as an object. - */ - rawHeaders() { - return this.toJson({ preserveCase: true }); + static create() { + return new HttpPipeline(); } - /** - * Get the headers that are contained in this collection as an array. - */ - headersArray() { - const headers = []; - for (const headerKey in this._headersMap) { - headers.push(this._headersMap[headerKey]); + orderPolicies() { + /** + * The goal of this method is to reliably order pipeline policies + * based on their declared requirements when they were added. + * + * Order is first determined by phase: + * + * 1. Serialize Phase + * 2. Policies not in a phase + * 3. Deserialize Phase + * 4. Retry Phase + * 5. Sign Phase + * + * Within each phase, policies are executed in the order + * they were added unless they were specified to execute + * before/after other policies or after a particular phase. + * + * To determine the final order, we will walk the policy list + * in phase order multiple times until all dependencies are + * satisfied. + * + * `afterPolicies` are the set of policies that must be + * executed before a given policy. This requirement is + * considered satisfied when each of the listed policies + * have been scheduled. + * + * `beforePolicies` are the set of policies that must be + * executed after a given policy. Since this dependency + * can be expressed by converting it into a equivalent + * `afterPolicies` declarations, they are normalized + * into that form for simplicity. + * + * An `afterPhase` dependency is considered satisfied when all + * policies in that phase have scheduled. + * + */ + const result = []; + // Track all policies we know about. + const policyMap = new Map(); + function createPhase(name) { + return { + name, + policies: new Set(), + hasRun: false, + hasAfterPolicies: false, + }; } - return headers; - } - /** - * Get the header names that are contained in this collection. - */ - headerNames() { - const headerNames = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerNames.push(headers[i].name); + // Track policies for each phase. 
+ const serializePhase = createPhase("Serialize"); + const noPhase = createPhase("None"); + const deserializePhase = createPhase("Deserialize"); + const retryPhase = createPhase("Retry"); + const signPhase = createPhase("Sign"); + // a list of phases in order + const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; + // Small helper function to map phase name to each Phase + function getPhase(phase) { + if (phase === "Retry") { + return retryPhase; + } + else if (phase === "Serialize") { + return serializePhase; + } + else if (phase === "Deserialize") { + return deserializePhase; + } + else if (phase === "Sign") { + return signPhase; + } + else { + return noPhase; + } } - return headerNames; - } - /** - * Get the header values that are contained in this collection. - */ - headerValues() { - const headerValues = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerValues.push(headers[i].value); + // First walk each policy and create a node to track metadata. + for (const descriptor of this._policies) { + const policy = descriptor.policy; + const options = descriptor.options; + const policyName = policy.name; + if (policyMap.has(policyName)) { + throw new Error("Duplicate policy names not allowed in pipeline"); + } + const node = { + policy, + dependsOn: new Set(), + dependants: new Set(), + }; + if (options.afterPhase) { + node.afterPhase = getPhase(options.afterPhase); + node.afterPhase.hasAfterPolicies = true; + } + policyMap.set(policyName, node); + const phase = getPhase(options.phase); + phase.policies.add(node); } - return headerValues; - } - /** - * Get the JSON object representation of this HTTP header collection. - */ - toJson(options = {}) { - const result = {}; - if (options.preserveCase) { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[header.name] = header.value; + // Now that each policy has a node, connect dependency references. + for (const descriptor of this._policies) { + const { policy, options } = descriptor; + const policyName = policy.name; + const node = policyMap.get(policyName); + if (!node) { + throw new Error(`Missing node for policy ${policyName}`); + } + if (options.afterPolicies) { + for (const afterPolicyName of options.afterPolicies) { + const afterNode = policyMap.get(afterPolicyName); + if (afterNode) { + // Linking in both directions helps later + // when we want to notify dependants. + node.dependsOn.add(afterNode); + afterNode.dependants.add(node); + } + } + } + if (options.beforePolicies) { + for (const beforePolicyName of options.beforePolicies) { + const beforeNode = policyMap.get(beforePolicyName); + if (beforeNode) { + // To execute before another node, make it + // depend on the current node. + beforeNode.dependsOn.add(node); + node.dependants.add(beforeNode); + } + } } } - else { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[getHeaderKey(header.name)] = header.value; + function walkPhase(phase) { + phase.hasRun = true; + // Sets iterate in insertion order + for (const node of phase.policies) { + if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + // If this node is waiting on a phase to complete, + // we need to skip it for now. + // Even if the phase is empty, we should wait for it + // to be walked to avoid re-ordering policies. 
+ continue; + } + if (node.dependsOn.size === 0) { + // If there's nothing else we're waiting for, we can + // add this policy to the result list. + result.push(node.policy); + // Notify anything that depends on this policy that + // the policy has been scheduled. + for (const dependant of node.dependants) { + dependant.dependsOn.delete(node); + } + policyMap.delete(node.policy.name); + phase.policies.delete(node); + } } } - return result; - } - /** - * Get the string representation of this HTTP header collection. - */ - toString() { - return JSON.stringify(this.toJson({ preserveCase: true })); - } - /** - * Create a deep clone/copy of this HttpHeaders collection. - */ - clone() { - const resultPreservingCasing = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - resultPreservingCasing[header.name] = header.value; + function walkPhases() { + for (const phase of orderedPhases) { + walkPhase(phase); + // if the phase isn't complete + if (phase.policies.size > 0 && phase !== noPhase) { + if (!noPhase.hasRun) { + // Try running noPhase to see if that unblocks this phase next tick. + // This can happen if a phase that happens before noPhase + // is waiting on a noPhase policy to complete. + walkPhase(noPhase); + } + // Don't proceed to the next phase until this phase finishes. + return; + } + if (phase.hasAfterPolicies) { + // Run any policies unblocked by this phase + walkPhase(noPhase); + } + } } - return new HttpHeaders(resultPreservingCasing); + // Iterate until we've put every node in the result list. + let iteration = 0; + while (policyMap.size > 0) { + iteration++; + const initialResultLength = result.length; + // Keep walking each phase in order until we can order every node. + walkPhases(); + // The result list *should* get at least one larger each time + // after the first full pass. + // Otherwise, we're going to loop forever. + if (result.length <= initialResultLength && iteration > 1) { + throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + } + } + return result; } } +/** + * Creates a totally empty pipeline. + * Useful for testing or creating a custom one. + */ +function createEmptyPipeline() { + return HttpPipeline.create(); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const logger = logger$1.createClientLogger("core-rest-pipeline"); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
+const RedactedString = "REDACTED"; +// Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; /** - * Encodes a string in base64 format. - * @param value - The string to encode + * @internal */ -function encodeString(value) { - return Buffer.from(value).toString("base64"); +class Sanitizer { + constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) { + allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); + allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "headers") { + return this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. 
+ return undefined; + } + else if (Array.isArray(value) || coreUtil.isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(obj) { + const sanitized = {}; + for (const key of Object.keys(obj)) { + if (this.allowedHeaderNames.has(key.toLowerCase())) { + sanitized[key] = obj[key]; + } + else { + sanitized[key] = RedactedString; + } + } + return sanitized; + } + sanitizeQuery(value) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (this.allowedQueryParameters.has(k.toLowerCase())) { + sanitized[k] = value[k]; + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const url = new URL(value); + if (!url.search) { + return value; + } + for (const [key] of url.searchParams) { + if (!this.allowedQueryParameters.has(key.toLowerCase())) { + url.searchParams.set(key, RedactedString); + } + } + return url.toString(); + } } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Encodes a byte array in base64 format. - * @param value - The Uint8Aray to encode + * The programmatic identifier of the logPolicy. */ -function encodeByteArray(value) { - // Buffer.from accepts | -- the TypeScript definition is off here - // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length - const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); - return bufferValue.toString("base64"); -} +const logPolicyName = "logPolicy"; /** - * Decodes a base64 string into a byte array. - * @param value - The base64 string to decode + * A policy that logs all requests and responses. + * @param options - Options to configure logPolicy. */ -function decodeString(value) { - return Buffer.from(value, "base64"); +function logPolicy(options = {}) { + var _a; + const logger$1 = (_a = options.logger) !== null && _a !== void 0 ? _a : logger.info; + const sanitizer = new Sanitizer({ + additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, + }); + return { + name: logPolicyName, + async sendRequest(request, next) { + if (!logger$1.enabled) { + return next(request); + } + logger$1(`Request: ${sanitizer.sanitize(request)}`); + const response = await next(request); + logger$1(`Response status code: ${response.status}`); + logger$1(`Headers: ${sanitizer.sanitize(response.headers)}`); + return response; + }, + }; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * A set of constants used internally when processing requests. + * The programmatic identifier of the redirectPolicy. */ -const Constants = { - /** - * The core-http version - */ - coreHttpVersion: "3.0.2", - /** - * Specifies HTTP. - */ - HTTP: "http:", - /** - * Specifies HTTPS. - */ - HTTPS: "https:", - /** - * Specifies HTTP Proxy. - */ - HTTP_PROXY: "HTTP_PROXY", - /** - * Specifies HTTPS Proxy. - */ - HTTPS_PROXY: "HTTPS_PROXY", - /** - * Specifies NO Proxy. - */ - NO_PROXY: "NO_PROXY", - /** - * Specifies ALL Proxy. 
- */ - ALL_PROXY: "ALL_PROXY", - HttpConstants: { - /** - * Http Verbs - */ - HttpVerbs: { - PUT: "PUT", - GET: "GET", - DELETE: "DELETE", - POST: "POST", - MERGE: "MERGE", - HEAD: "HEAD", - PATCH: "PATCH", - }, - StatusCodes: { - TooManyRequests: 429, - ServiceUnavailable: 503, - }, - }, - /** - * Defines constants for use with HTTP headers. - */ - HeaderConstants: { - /** - * The Authorization header. - */ - AUTHORIZATION: "authorization", - AUTHORIZATION_SCHEME: "Bearer", - /** - * The Retry-After response-header field can be used with a 503 (Service - * Unavailable) or 349 (Too Many Requests) responses to indicate how long - * the service is expected to be unavailable to the requesting client. - */ - RETRY_AFTER: "Retry-After", - /** - * The UserAgent header. - */ - USER_AGENT: "User-Agent", - }, -}; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +const redirectPolicyName = "redirectPolicy"; /** - * Default key used to access the XML attributes. + * Methods that are allowed to follow redirects 301 and 302 */ -const XML_ATTRKEY = "$"; +const allowedRedirect = ["GET", "HEAD"]; /** - * Default key used to access the XML value content. + * A policy to follow Location headers from the server in order + * to support server-side redirection. + * In the browser, this policy is not used. + * @param options - Options to control policy behavior. */ -const XML_CHARKEY = "_"; +function redirectPolicy(options = {}) { + const { maxRetries = 20 } = options; + return { + name: redirectPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return handleRedirect(next, response, maxRetries); + }, + }; +} +async function handleRedirect(next, response, maxRetries, currentRetries = 0) { + const { request, status, headers } = response; + const locationHeader = headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + currentRetries < maxRetries) { + const url = new URL(locationHeader, request.url); + request.url = url.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + request.headers.delete("Content-Length"); + delete request.body; + } + request.headers.delete("Authorization"); + const res = await next(request); + return handleRedirect(next, res, maxRetries, currentRetries + 1); + } + return response; +} // Copyright (c) Microsoft Corporation. -const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +// Licensed under the MIT license. /** - * Encodes an URI. - * - * @param uri - The URI to be encoded. - * @returns The encoded URI. + * @internal */ -function encodeUri(uri) { - return encodeURIComponent(uri) - .replace(/!/g, "%21") - .replace(/"/g, "%27") - .replace(/\(/g, "%28") - .replace(/\)/g, "%29") - .replace(/\*/g, "%2A"); +function getHeaderName() { + return "User-Agent"; } /** - * Returns a stripped version of the Http Response which only contains body, - * headers and the status. - * - * @param response - The Http Response - * @returns The stripped version of Http Response. 
+ * @internal */ -function stripResponse(response) { - const strippedResponse = {}; - strippedResponse.body = response.bodyAsText; - strippedResponse.headers = response.headers; - strippedResponse.status = response.status; - return strippedResponse; +function setPlatformSpecificData(map) { + map.set("Node", process.version); + map.set("OS", `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`); } -/** - * Returns a stripped version of the Http Request that does not contain the - * Authorization header. - * - * @param request - The Http Request object - * @returns The stripped version of Http Request. - */ -function stripRequest(request) { - const strippedRequest = request.clone(); - if (strippedRequest.headers) { - strippedRequest.headers.remove("authorization"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const SDK_VERSION = "1.12.3"; +const DEFAULT_RETRY_POLICY_COUNT = 3; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function getUserAgentString(telemetryInfo) { + const parts = []; + for (const [key, value] of telemetryInfo) { + const token = value ? `${key}/${value}` : key; + parts.push(token); } - return strippedRequest; + return parts.join(" "); } /** - * Validates the given uuid as a string - * - * @param uuid - The uuid as a string that needs to be validated - * @returns True if the uuid is valid; false otherwise. + * @internal */ -function isValidUuid(uuid) { - return validUuidRegex.test(uuid); +function getUserAgentHeaderName() { + return getHeaderName(); } /** - * Generated UUID - * - * @returns RFC4122 v4 UUID. + * @internal */ -function generateUuid() { - return uuid.v4(); +function getUserAgentValue(prefix) { + const runtimeInfo = new Map(); + runtimeInfo.set("core-rest-pipeline", SDK_VERSION); + setPlatformSpecificData(runtimeInfo); + const defaultAgent = getUserAgentString(runtimeInfo); + const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; + return userAgentValue; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const UserAgentHeaderName = getUserAgentHeaderName(); /** - * Executes an array of promises sequentially. Inspiration of this method is here: - * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! - * - * @param promiseFactories - An array of promise factories(A function that return a promise) - * @param kickstart - Input to the first promise that is used to kickstart the promise chain. - * If not provided then the promise chain starts with undefined. - * @returns A chain of resolved or rejected promises + * The programmatic identifier of the userAgentPolicy. */ -function executePromisesSequentially(promiseFactories, kickstart) { - let result = Promise.resolve(kickstart); - promiseFactories.forEach((promiseFactory) => { - result = result.then(promiseFactory); - }); - return result; -} +const userAgentPolicyName = "userAgentPolicy"; /** - * Converts a Promise to a callback. - * @param promise - The Promise to be converted to a callback - * @returns A function that takes the callback `(cb: Function) => void` - * @deprecated generated code should instead depend on responseToBody + * A policy that sets the User-Agent header (or equivalent) to reflect + * the library version. + * @param options - Options to customize the user agent value. 
*/ -// eslint-disable-next-line @typescript-eslint/ban-types -function promiseToCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); - } - // eslint-disable-next-line @typescript-eslint/ban-types - return (cb) => { - promise - .then((data) => { - // eslint-disable-next-line promise/no-callback-in-promise - return cb(undefined, data); - }) - .catch((err) => { - // eslint-disable-next-line promise/no-callback-in-promise - cb(err); - }); +function userAgentPolicy(options = {}) { + const userAgentValue = getUserAgentValue(options.userAgentPrefix); + return { + name: userAgentPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(UserAgentHeaderName)) { + request.headers.set(UserAgentHeaderName, userAgentValue); + } + return next(request); + }, }; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Converts a Promise to a service callback. - * @param promise - The Promise of HttpOperationResponse to be converted to a service callback - * @returns A function that takes the service callback (cb: ServiceCallback): void + * The programmatic identifier of the decompressResponsePolicy. */ -function promiseToServiceCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); - } - return (cb) => { - promise - .then((data) => { - return process.nextTick(cb, undefined, data.parsedBody, data.request, data); - }) - .catch((err) => { - process.nextTick(cb, err); - }); +const decompressResponsePolicyName = "decompressResponsePolicy"; +/** + * A policy to enable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +function decompressResponsePolicy() { + return { + name: decompressResponsePolicyName, + async sendRequest(request, next) { + // HEAD requests have no body + if (request.method !== "HEAD") { + request.headers.set("Accept-Encoding", "gzip,deflate"); + } + return next(request); + }, }; } -function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { - if (!Array.isArray(obj)) { - obj = [obj]; - } - if (!xmlNamespaceKey || !xmlNamespace) { - return { [elementName]: obj }; - } - const result = { [elementName]: obj }; - result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; - return result; -} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const StandardAbortMessage = "The operation was aborted."; /** - * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor - * @param targetCtor - The target object on which the properties need to be applied. - * @param sourceCtors - An array of source objects from which the properties need to be taken. + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * - abortSignal - The abortSignal associated with containing operation. + * - abortErrorMsg - The abort error message associated with containing operation. 
+ * @returns Resolved promise */ -function applyMixins(targetCtorParam, sourceCtors) { - const castTargetCtorParam = targetCtorParam; - sourceCtors.forEach((sourceCtor) => { - Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { - castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; - }); +function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (timer) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } }); } -const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; /** - * Indicates whether the given string is in ISO 8601 format. - * @param value - The value to be validated for ISO 8601 duration format. - * @returns `true` if valid, `false` otherwise. + * @internal + * @returns the parsed value or undefined if the parsed value is invalid. */ -function isDuration(value) { - return validateISODuration.test(value); +function parseHeaderValueAsNumber(response, headerName) { + const value = response.headers.get(headerName); + if (!value) + return; + const valueAsNum = Number(value); + if (Number.isNaN(valueAsNum)) + return; + return valueAsNum; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Replace all of the instances of searchValue in value with the provided replaceValue. - * @param value - The value to search and replace in. - * @param searchValue - The value to search for in the value argument. - * @param replaceValue - The value to replace searchValue with in the value argument. - * @returns The value where each instance of searchValue was replaced with replacedValue. + * The header that comes back from Azure services representing + * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). */ -function replaceAll(value, searchValue, replaceValue) { - return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); -} +const RetryAfterHeader = "Retry-After"; /** - * Determines whether the given entity is a basic/primitive type - * (string, number, boolean, null, undefined). - * @param value - Any entity - * @returns true is it is primitive type, false otherwise. + * The headers that come back from Azure services representing + * the amount of time (minimum) to wait to retry. 
+ * + * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds + * "Retry-After" : seconds or timestamp */ -function isPrimitiveType(value) { - return (typeof value !== "object" && typeof value !== "function") || value === null; -} -function getEnvironmentValue(name) { - if (process.env[name]) { - return process.env[name]; - } - else if (process.env[name.toLowerCase()]) { - return process.env[name.toLowerCase()]; - } - return undefined; -} +const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; /** + * A response is a throttling retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + * + * Returns the `retryAfterInMs` value if the response is a throttling retry response. + * If not throttling retry response, returns `undefined`. + * * @internal - * @returns true when input is an object type that is not null, Array, RegExp, or Date. - */ -function isObject(input) { - return (typeof input === "object" && - input !== null && - !Array.isArray(input) && - !(input instanceof RegExp) && - !(input instanceof Date)); -} - -// Copyright (c) Microsoft Corporation. -// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. -/** - * Used to map raw response objects to final shapes. - * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. - * Also allows pulling values from headers, as well as inserting default values and constants. */ -class Serializer { - constructor( - /** - * The provided model mapper. - */ - modelMappers = {}, - /** - * Whether the contents are XML or not. - */ - isXML) { - this.modelMappers = modelMappers; - this.isXML = isXML; - } - /** - * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. - * @param mapper - The definition of data models. - * @param value - The value. - * @param objectName - Name of the object. Used in the error messages. - * @deprecated Removing the constraints validation on client side. 
- */ - validateConstraints(mapper, value, objectName) { - const failValidation = (constraintName, constraintValue) => { - throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); - }; - if (mapper.constraints && value != undefined) { - const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; - if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { - failValidation("ExclusiveMaximum", ExclusiveMaximum); - } - if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { - failValidation("ExclusiveMinimum", ExclusiveMinimum); - } - if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { - failValidation("InclusiveMaximum", InclusiveMaximum); - } - if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { - failValidation("InclusiveMinimum", InclusiveMinimum); - } - const valueAsArray = value; - if (MaxItems != undefined && valueAsArray.length > MaxItems) { - failValidation("MaxItems", MaxItems); - } - if (MaxLength != undefined && valueAsArray.length > MaxLength) { - failValidation("MaxLength", MaxLength); +function getRetryAfterInMs(response) { + if (!(response && [429, 503].includes(response.status))) + return undefined; + try { + // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After" + for (const header of AllRetryAfterHeaders) { + const retryAfterValue = parseHeaderValueAsNumber(response, header); + if (retryAfterValue === 0 || retryAfterValue) { + // "Retry-After" header ==> seconds + // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds + const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1; + return retryAfterValue * multiplyingFactor; // in milli-seconds } - if (MinItems != undefined && valueAsArray.length < MinItems) { - failValidation("MinItems", MinItems); + } + // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds + const retryAfterHeader = response.headers.get(RetryAfterHeader); + if (!retryAfterHeader) + return; + const date = Date.parse(retryAfterHeader); + const diff = date - Date.now(); + // negative diff would mean a date in the past, so retry asap with 0 milliseconds + return Number.isFinite(diff) ? Math.max(0, diff) : undefined; + } + catch (e) { + return undefined; + } +} +/** + * A response is a retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + */ +function isThrottlingRetryResponse(response) { + return Number.isFinite(getRetryAfterInMs(response)); +} +function throttlingRetryStrategy() { + return { + name: "throttlingRetryStrategy", + retry({ response }) { + const retryAfterInMs = getRetryAfterInMs(response); + if (!Number.isFinite(retryAfterInMs)) { + return { skipStrategy: true }; } - if (MinLength != undefined && valueAsArray.length < MinLength) { - failValidation("MinLength", MinLength); + return { + retryAfterInMs, + }; + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// intervals are in milliseconds +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64; +/** + * A retry strategy that retries with an exponentially increasing delay in these two cases: + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). + */ +function exponentialRetryStrategy(options = {}) { + var _a, _b; + const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL; + const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? _b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + let retryAfterInMs = retryInterval; + return { + name: "exponentialRetryStrategy", + retry({ retryCount, response, responseError }) { + const matchedSystemError = isSystemError(responseError); + const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; + const isExponential = isExponentialRetryResponse(response); + const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; + const unknownResponse = response && (isThrottlingRetryResponse(response) || !isExponential); + if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { + return { skipStrategy: true }; } - if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { - failValidation("MultipleOf", MultipleOf); + if (responseError && !matchedSystemError && !isExponential) { + return { errorToThrow: responseError }; } - if (Pattern) { - const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; - if (typeof value !== "string" || value.match(pattern) === null) { - failValidation("Pattern", Pattern); + // Exponentially increase the delay each time + const exponentialDelay = retryAfterInMs * Math.pow(2, retryCount); + // Don't let the delay exceed the maximum + const clampedExponentialDelay = Math.min(maxRetryInterval, exponentialDelay); + // Allow the final value to have some "jitter" (within 50% of the delay size) so + // that retries across multiple clients don't occur simultaneously. + retryAfterInMs = + clampedExponentialDelay / 2 + coreUtil.getRandomIntegerInclusive(0, clampedExponentialDelay / 2); + return { retryAfterInMs }; + }, + }; +} +/** + * A response is a retry response if it has status codes: + * - 408, or + * - Greater or equal than 500, except for 501 and 505. + */ +function isExponentialRetryResponse(response) { + return Boolean(response && + response.status !== undefined && + (response.status >= 500 || response.status === 408) && + response.status !== 501 && + response.status !== 505); +} +/** + * Determines whether an error from a pipeline response was triggered in the network layer. + */ +function isSystemError(err) { + if (!err) { + return false; + } + return (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT" || + err.code === "ENOTFOUND"); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const retryPolicyLogger = logger$1.createClientLogger("core-rest-pipeline retryPolicy"); +/** + * The programmatic identifier of the retryPolicy. 
+ */ +const retryPolicyName = "retryPolicy"; +/** + * retryPolicy is a generic policy to enable retrying requests when certain conditions are met + */ +function retryPolicy(strategies, options = { maxRetries: DEFAULT_RETRY_POLICY_COUNT }) { + const logger = options.logger || retryPolicyLogger; + return { + name: retryPolicyName, + async sendRequest(request, next) { + var _a, _b; + let response; + let responseError; + let retryCount = -1; + // eslint-disable-next-line no-constant-condition + retryRequest: while (true) { + retryCount += 1; + response = undefined; + responseError = undefined; + try { + logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); + response = await next(request); + logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId); + } + catch (e) { + logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId); + // RestErrors are valid targets for the retry strategies. + // If none of the retry strategies can work with them, they will be thrown later in this policy. + // If the received error is not a RestError, it is immediately thrown. + responseError = e; + if (!e || responseError.name !== "RestError") { + throw e; + } + response = responseError.response; + } + if ((_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + logger.error(`Retry ${retryCount}: Request aborted.`); + const abortError = new abortController.AbortError(); + throw abortError; + } + if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_POLICY_COUNT)) { + logger.info(`Retry ${retryCount}: Maximum retries reached. Returning the last received response, or throwing the last received error.`); + if (responseError) { + throw responseError; + } + else if (response) { + return response; + } + else { + throw new Error("Maximum retries reached with no response or error to throw"); + } + } + logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); + strategiesLoop: for (const strategy of strategies) { + const strategyLogger = strategy.logger || retryPolicyLogger; + strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); + const modifiers = strategy.retry({ + retryCount, + response, + responseError, + }); + if (modifiers.skipStrategy) { + strategyLogger.info(`Retry ${retryCount}: Skipped.`); + continue strategiesLoop; + } + const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; + if (errorToThrow) { + strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); + throw errorToThrow; + } + if (retryAfterInMs || retryAfterInMs === 0) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); + await delay(retryAfterInMs, undefined, { abortSignal: request.abortSignal }); + continue retryRequest; + } + if (redirectTo) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); + request.url = redirectTo; + continue retryRequest; + } } + if (responseError) { + logger.info(`None of the retry strategies could work with the received error. Throwing it.`); + throw responseError; + } + if (response) { + logger.info(`None of the retry strategies could work with the received response. 
Returning it.`); + return response; + } + // If all the retries skip and there's no response, + // we're still in the retry loop, so a new request will be sent + // until `maxRetries` is reached. } - if (UniqueItems && - valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { - failValidation("UniqueItems", UniqueItems); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Name of the {@link defaultRetryPolicy} + */ +const defaultRetryPolicyName = "defaultRetryPolicy"; +/** + * A policy that retries according to three strategies: + * - When the server sends a 429 response with a Retry-After header. + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. + */ +function defaultRetryPolicy(options = {}) { + var _a; + return { + name: defaultRetryPolicyName, + sendRequest: retryPolicy([throttlingRetryStrategy(), exponentialRetryStrategy(options)], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function normalizeName(name) { + return name.toLowerCase(); +} +function* headerIterator(map) { + for (const entry of map.values()) { + yield [entry.name, entry.value]; + } +} +class HttpHeadersImpl { + constructor(rawHeaders) { + this._headersMap = new Map(); + if (rawHeaders) { + for (const headerName of Object.keys(rawHeaders)) { + this.set(headerName, rawHeaders[headerName]); } } } /** - * Serialize the given object based on its metadata defined in the mapper. - * - * @param mapper - The mapper which defines the metadata of the serializable object. - * @param object - A valid Javascript object to be serialized. - * @param objectName - Name of the serialized object. - * @param options - additional options to deserialization. - * @returns A valid serialized Javascript object. + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param name - The name of the header to set. This value is case-insensitive. + * @param value - The value of the header to set. */ - serialize(mapper, object, objectName, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - let payload = {}; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; - } - if (mapperType.match(/^Sequence$/i) !== null) { - payload = []; - } - if (mapper.isConstant) { - object = mapper.defaultValue; - } - // This table of allowed values should help explain - // the mapper.required and mapper.nullable properties. - // X means "neither undefined or null are allowed". 
- // || required - // || true | false - // nullable || ========================== - // true || null | undefined/null - // false || X | undefined - // undefined || X | undefined/null - const { required, nullable } = mapper; - if (required && nullable && object === undefined) { - throw new Error(`${objectName} cannot be undefined.`); - } - if (required && !nullable && object == undefined) { - throw new Error(`${objectName} cannot be null or undefined.`); - } - if (!required && nullable === false && object === null) { - throw new Error(`${objectName} cannot be null.`); - } - if (object == undefined) { - payload = object; - } - else { - if (mapperType.match(/^any$/i) !== null) { - payload = object; - } - else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { - payload = serializeBasicTypes(mapperType, objectName, object); - } - else if (mapperType.match(/^Enum$/i) !== null) { - const enumMapper = mapper; - payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); - } - else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { - payload = serializeDateTypes(mapperType, object, objectName); - } - else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = serializeByteArrayType(objectName, object); - } - else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = serializeBase64UrlType(objectName, object); - } - else if (mapperType.match(/^Sequence$/i) !== null) { - payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - else if (mapperType.match(/^Composite$/i) !== null) { - payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - } - return payload; + set(name, value) { + this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); } /** - * Deserialize the given object based on its metadata defined in the mapper. - * - * @param mapper - The mapper which defines the metadata of the serializable object. - * @param responseBody - A valid Javascript entity to be deserialized. - * @param objectName - Name of the deserialized object. - * @param options - Controls behavior of XML parser and builder. - * @returns A valid deserialized Javascript object. + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param name - The name of the header. This value is case-insensitive. */ - deserialize(mapper, responseBody, objectName, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - if (responseBody == undefined) { - if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { - // Edge case for empty XML non-wrapped lists. xml2js can't distinguish - // between the list being empty versus being missing, - // so let's do the more user-friendly thing and return an empty list. 
- responseBody = []; - } - // specifically check for undefined as default value can be a falsey value `0, "", false, null` - if (mapper.defaultValue !== undefined) { - responseBody = mapper.defaultValue; + get(name) { + var _a; + return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + * @param name - The name of the header to set. This value is case-insensitive. + */ + has(name) { + return this._headersMap.has(normalizeName(name)); + } + /** + * Remove the header with the provided headerName. + * @param name - The name of the header to remove. + */ + delete(name) { + this._headersMap.delete(normalizeName(name)); + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJSON(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const entry of this._headersMap.values()) { + result[entry.name] = entry.value; } - return responseBody; - } - let payload; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; - } - if (mapperType.match(/^Composite$/i) !== null) { - payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); } else { - if (this.isXML) { - const xmlCharKey = updatedOptions.xmlCharKey; - const castResponseBody = responseBody; - /** - * If the mapper specifies this as a non-composite type value but the responseBody contains - * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, - * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. - */ - if (castResponseBody[XML_ATTRKEY] != undefined && - castResponseBody[xmlCharKey] != undefined) { - responseBody = castResponseBody[xmlCharKey]; - } - } - if (mapperType.match(/^Number$/i) !== null) { - payload = parseFloat(responseBody); - if (isNaN(payload)) { - payload = responseBody; - } + for (const [normalizedName, entry] of this._headersMap) { + result[normalizedName] = entry.value; } - else if (mapperType.match(/^Boolean$/i) !== null) { - if (responseBody === "true") { - payload = true; - } - else if (responseBody === "false") { - payload = false; + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJSON({ preserveCase: true })); + } + /** + * Iterate over tuples of header [name, value] pairs. + */ + [Symbol.iterator]() { + return headerIterator(this._headersMap); + } +} +/** + * Creates an object that satisfies the `HttpHeaders` interface. + * @param rawHeaders - A simple object representing initial headers + */ +function createHttpHeaders(rawHeaders) { + return new HttpHeadersImpl(rawHeaders); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the formDataPolicy. + */ +const formDataPolicyName = "formDataPolicy"; +/** + * A policy that encodes FormData on the request into the body. 
+ */ +function formDataPolicy() { + return { + name: formDataPolicyName, + async sendRequest(request, next) { + if (request.formData) { + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { + request.body = wwwFormUrlEncode(request.formData); } else { - payload = responseBody; + await prepareFormData(request.formData, request); } + request.formData = undefined; } - else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { - payload = responseBody; - } - else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { - payload = new Date(responseBody); - } - else if (mapperType.match(/^UnixTime$/i) !== null) { - payload = unixTimeToDate(responseBody); - } - else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = decodeString(responseBody); - } - else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = base64UrlToByteArray(responseBody); - } - else if (mapperType.match(/^Sequence$/i) !== null) { - payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); - } - else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + return next(request); + }, + }; +} +function wwwFormUrlEncode(formData) { + const urlSearchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(formData)) { + if (Array.isArray(value)) { + for (const subValue of value) { + urlSearchParams.append(key, subValue.toString()); } } - if (mapper.isConstant) { - payload = mapper.defaultValue; + else { + urlSearchParams.append(key, value.toString()); } - return payload; - } -} -function trimEnd(str, ch) { - let len = str.length; - while (len - 1 >= 0 && str[len - 1] === ch) { - --len; - } - return str.substr(0, len); -} -function bufferToBase64Url(buffer) { - if (!buffer) { - return undefined; - } - if (!(buffer instanceof Uint8Array)) { - throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); } - // Uint8Array to Base64. - const str = encodeByteArray(buffer); - // Base64 to Base64Url. - return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); + return urlSearchParams.toString(); } -function base64UrlToByteArray(str) { - if (!str) { - return undefined; - } - if (str && typeof str.valueOf() !== "string") { - throw new Error("Please provide an input of type string for converting to Uint8Array"); +async function prepareFormData(formData, request) { + // validate content type (multipart/form-data) + const contentType = request.headers.get("Content-Type"); + if (contentType && !contentType.startsWith("multipart/form-data")) { + // content type is specified and is not multipart/form-data. Exit. + return; } - // Base64Url to Base64. - str = str.replace(/-/g, "+").replace(/_/g, "/"); - // Base64 to Uint8Array. - return decodeString(str); -} -function splitSerializeName(prop) { - const classes = []; - let partialclass = ""; - if (prop) { - const subwords = prop.split("."); - for (const item of subwords) { - if (item.charAt(item.length - 1) === "\\") { - partialclass += item.substr(0, item.length - 1) + "."; + request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? 
contentType : "multipart/form-data"); + // set body to MultipartRequestBody using content from FormDataMap + const parts = []; + for (const [fieldName, values] of Object.entries(formData)) { + for (const value of Array.isArray(values) ? values : [values]) { + if (typeof value === "string") { + parts.push({ + headers: createHttpHeaders({ + "Content-Disposition": `form-data; name="${fieldName}"`, + }), + body: coreUtil.stringToUint8Array(value, "utf-8"), + }); } else { - partialclass += item; - classes.push(partialclass); - partialclass = ""; + // using || instead of ?? here since if value.name is empty we should create a file name + const fileName = value.name || "blob"; + const headers = createHttpHeaders(); + headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); + if (value.type) { + headers.set("Content-Type", value.type); + } + parts.push({ + headers, + body: value, + }); } } + request.multipartBody = { parts }; } - return classes; } -function dateToUnixTime(d) { - if (!d) { - return undefined; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const HTTPS_PROXY = "HTTPS_PROXY"; +const HTTP_PROXY = "HTTP_PROXY"; +const ALL_PROXY = "ALL_PROXY"; +const NO_PROXY = "NO_PROXY"; +/** + * The programmatic identifier of the proxyPolicy. + */ +const proxyPolicyName = "proxyPolicy"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; } - if (typeof d.valueOf() === "string") { - d = new Date(d); + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; } - return Math.floor(d.getTime() / 1000); + return undefined; } -function unixTimeToDate(n) { - if (!n) { +function loadEnvironmentProxyValue() { + if (!process) { return undefined; } - return new Date(n * 1000); + const httpsProxy = getEnvironmentValue(HTTPS_PROXY); + const allProxy = getEnvironmentValue(ALL_PROXY); + const httpProxy = getEnvironmentValue(HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; } -function serializeBasicTypes(typeName, objectName, value) { - if (value !== null && value !== undefined) { - if (typeName.match(/^Number$/i) !== null) { - if (typeof value !== "number") { - throw new Error(`${objectName} with value ${value} must be of type number.`); - } - } - else if (typeName.match(/^String$/i) !== null) { - if (typeof value.valueOf() !== "string") { - throw new Error(`${objectName} with value "${value}" must be of type string.`); - } - } - else if (typeName.match(/^Uuid$/i) !== null) { - if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { - throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = new URL(uri).hostname; + if (bypassedMap === null || bypassedMap === void 0 ? 
void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; } - } - else if (typeName.match(/^Boolean$/i) !== null) { - if (typeof value !== "boolean") { - throw new Error(`${objectName} with value ${value} must be of type boolean.`); + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } } } - else if (typeName.match(/^Stream$/i) !== null) { - const objectType = typeof value; - if (objectType !== "string" && - objectType !== "function" && - !(value instanceof ArrayBuffer) && - !ArrayBuffer.isView(value) && - !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { - throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + else { + if (host === pattern) { + isBypassedFlag = true; } } } - return value; + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; } -function serializeEnumType(objectName, allowedValues, value) { - if (!allowedValues) { - throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); - } - const isPresent = allowedValues.some((item) => { - if (typeof item.valueOf() === "string") { - return item.toLowerCase() === value.toLowerCase(); - } - return item === value; - }); - if (!isPresent) { - throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); +function loadNoProxy() { + const noProxy = getEnvironmentValue(NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); } - return value; + return []; } -function serializeByteArrayType(objectName, value) { - let returnValue = ""; - if (value != undefined) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); +/** + * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. + * If no argument is given, it attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + * @param proxyUrl - The url of the proxy to use. May contain authentication information. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; } - returnValue = encodeByteArray(value); } - return returnValue; + const parsedUrl = new URL(proxyUrl); + const schema = parsedUrl.protocol ? 
parsedUrl.protocol + "//" : ""; + return { + host: schema + parsedUrl.hostname, + port: Number.parseInt(parsedUrl.port || "80"), + username: parsedUrl.username, + password: parsedUrl.password, + }; } -function serializeBase64UrlType(objectName, value) { - let returnValue = ""; - if (value != undefined) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); - } - returnValue = bufferToBase64Url(value) || ""; +/** + * @internal + */ +function getProxyAgentOptions(proxySettings, { headers, tlsSettings }) { + let parsedProxyUrl; + try { + parsedProxyUrl = new URL(proxySettings.host); + } + catch (_error) { + throw new Error(`Expecting a valid host string in proxy settings, but found "${proxySettings.host}".`); + } + if (tlsSettings) { + logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); + } + const proxyAgentOptions = { + hostname: parsedProxyUrl.hostname, + port: proxySettings.port, + protocol: parsedProxyUrl.protocol, + headers: headers.toJSON(), + }; + if (proxySettings.username && proxySettings.password) { + proxyAgentOptions.auth = `${proxySettings.username}:${proxySettings.password}`; } - return returnValue; + else if (proxySettings.username) { + proxyAgentOptions.auth = `${proxySettings.username}`; + } + return proxyAgentOptions; } -function serializeDateTypes(typeName, value, objectName) { - if (value != undefined) { - if (typeName.match(/^Date$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = - value instanceof Date - ? value.toISOString().substring(0, 10) - : new Date(value).toISOString().substring(0, 10); - } - else if (typeName.match(/^DateTime$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); - } - else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); - } - value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); - } - else if (typeName.match(/^UnixTime$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + - `for it to be serialized in UnixTime/Epoch format.`); +function setProxyAgentOnRequest(request, cachedAgents) { + // Custom Agent should take precedence so if one is present + // we should skip to avoid overwriting it. 
+ if (request.agent) { + return; + } + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + const proxySettings = request.proxySettings; + if (proxySettings) { + if (isInsecure) { + if (!cachedAgents.httpProxyAgent) { + const proxyAgentOptions = getProxyAgentOptions(proxySettings, request); + cachedAgents.httpProxyAgent = new httpProxyAgent.HttpProxyAgent(proxyAgentOptions); } - value = dateToUnixTime(value); + request.agent = cachedAgents.httpProxyAgent; } - else if (typeName.match(/^TimeSpan$/i) !== null) { - if (!isDuration(value)) { - throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + else { + if (!cachedAgents.httpsProxyAgent) { + const proxyAgentOptions = getProxyAgentOptions(proxySettings, request); + cachedAgents.httpsProxyAgent = new httpsProxyAgent.HttpsProxyAgent(proxyAgentOptions); } + request.agent = cachedAgents.httpsProxyAgent; } } - return value; } -function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { - if (!Array.isArray(object)) { - throw new Error(`${objectName} must be of type Array.`); - } - const elementType = mapper.type.element; - if (!elementType || typeof elementType !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings = getDefaultProxySettings(), options) { + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); } - const tempArray = []; - for (let i = 0; i < object.length; i++) { - const serializedValue = serializer.serialize(elementType, object[i], objectName, options); - if (isXml && elementType.xmlNamespace) { - const xmlnsKey = elementType.xmlNamespacePrefix - ? `xmlns:${elementType.xmlNamespacePrefix}` - : "xmlns"; - if (elementType.type.name === "Composite") { - tempArray[i] = Object.assign({}, serializedValue); - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + const cachedAgents = {}; + return { + name: proxyPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? 
undefined : globalBypassedMap)) { + request.proxySettings = proxySettings; } - else { - tempArray[i] = {}; - tempArray[i][options.xmlCharKey] = serializedValue; - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + if (request.proxySettings) { + setProxyAgentOnRequest(request, cachedAgents); } - } - else { - tempArray[i] = serializedValue; - } - } - return tempArray; + return next(request); + }, + }; } -function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { - if (typeof object !== "object") { - throw new Error(`${objectName} must be of type object.`); - } - const valueType = mapper.type.value; - if (!valueType || typeof valueType !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); - } - const tempDictionary = {}; - for (const key of Object.keys(object)) { - const serializedValue = serializer.serialize(valueType, object[key], objectName, options); - // If the element needs an XML namespace we need to add it within the $ property - tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); - } - // Add the namespace to the root element if needed - if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; - const result = tempDictionary; - result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; - return result; - } - return tempDictionary; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the setClientRequestIdPolicy. + */ +const setClientRequestIdPolicyName = "setClientRequestIdPolicy"; +/** + * Each PipelineRequest gets a unique id upon creation. + * This policy passes that unique id along via an HTTP header to enable better + * telemetry and tracing. + * @param requestIdHeaderName - The name of the header to pass the request ID to. + */ +function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + name: setClientRequestIdPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(requestIdHeaderName)) { + request.headers.set(requestIdHeaderName, request.requestId); + } + return next(request); + }, + }; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Resolves the additionalProperties property from a referenced mapper. - * @param serializer - The serializer containing the entire set of mappers. - * @param mapper - The composite mapper to resolve. - * @param objectName - Name of the object being serialized. + * Name of the TLS Policy */ -function resolveAdditionalProperties(serializer, mapper, objectName) { - const additionalProperties = mapper.type.additionalProperties; - if (!additionalProperties && mapper.type.className) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; - } - return additionalProperties; +const tlsPolicyName = "tlsPolicy"; +/** + * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. 
+ */ +function tlsPolicy(tlsSettings) { + return { + name: tlsPolicyName, + sendRequest: async (req, next) => { + // Users may define a request tlsSettings, honor those over the client level one + if (!req.tlsSettings) { + req.tlsSettings = tlsSettings; + } + return next(req); + }, + }; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const custom = util.inspect.custom; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const errorSanitizer = new Sanitizer(); /** - * Finds the mapper referenced by `className`. - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - * @param objectName - Name of the object being serialized + * A custom error type for failed pipeline requests. */ -function resolveReferencedMapper(serializer, mapper, objectName) { - const className = mapper.type.className; - if (!className) { - throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); +class RestError extends Error { + constructor(message, options = {}) { + super(message); + this.name = "RestError"; + this.code = options.code; + this.statusCode = options.statusCode; + this.request = options.request; + this.response = options.response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; } - return serializer.modelMappers[className]; } /** - * Resolves a composite mapper's modelProperties. - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve + * Something went wrong when making the request. + * This means the actual request failed for some reason, + * such as a DNS issue or the connection being lost. */ -function resolveModelProperties(serializer, mapper, objectName) { - let modelProps = mapper.type.modelProperties; - if (!modelProps) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - if (!modelMapper) { - throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); - } - modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; - if (!modelProps) { - throw new Error(`modelProperties cannot be null or undefined in the ` + - `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); - } +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * This means that parsing the response from the server failed. + * It may have been malformed. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +/** + * Typeguard for RestError + * @param e - Something caught by a catch clause. 
+ */ +function isRestError(e) { + if (e instanceof RestError) { + return true; } - return modelProps; + return coreUtil.isError(e) && e.name === "RestError"; } -function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); - } - if (object != undefined) { - const payload = {}; - const modelProps = resolveModelProperties(serializer, mapper, objectName); - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - if (propertyMapper.readOnly) { - continue; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the tracingPolicy. + */ +const tracingPolicyName = "tracingPolicy"; +/** + * A simple policy to create OpenTelemetry Spans for each request made by the pipeline + * that has SpanOptions with a parent. + * Requests made without a parent Span will not be recorded. + * @param options - Options to configure the telemetry logged by the tracing policy. + */ +function tracingPolicy(options = {}) { + const userAgent = getUserAgentValue(options.userAgentPrefix); + const tracingClient = tryCreateTracingClient(); + return { + name: tracingPolicyName, + async sendRequest(request, next) { + var _a, _b; + if (!tracingClient || !((_a = request.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext)) { + return next(request); } - let propName; - let parentObject = payload; - if (serializer.isXML) { - if (propertyMapper.xmlIsWrapped) { - propName = propertyMapper.xmlName; - } - else { - propName = propertyMapper.xmlElementName || propertyMapper.xmlName; - } + const { span, tracingContext } = (_b = tryCreateSpan(tracingClient, request, userAgent)) !== null && _b !== void 0 ? _b : {}; + if (!span || !tracingContext) { + return next(request); } - else { - const paths = splitSerializeName(propertyMapper.serializedName); - propName = paths.pop(); - for (const pathName of paths) { - const childObject = parentObject[pathName]; - if (childObject == undefined && - (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { - parentObject[pathName] = {}; - } - parentObject = parentObject[pathName]; - } - } - if (parentObject != undefined) { - if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix - ? `xmlns:${mapper.xmlNamespacePrefix}` - : "xmlns"; - parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); - } - const propertyObjectName = propertyMapper.serializedName !== "" - ? objectName + "." + propertyMapper.serializedName - : objectName; - let toSerialize = object[key]; - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator && - polymorphicDiscriminator.clientName === key && - toSerialize == undefined) { - toSerialize = mapper.serializedName; - } - const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); - if (serializedValue !== undefined && propName != undefined) { - const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); - if (isXml && propertyMapper.xmlIsAttribute) { - // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. - // This keeps things simple while preventing name collision - // with names in user documents. 
- parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; - parentObject[XML_ATTRKEY][propName] = serializedValue; - } - else if (isXml && propertyMapper.xmlIsWrapped) { - parentObject[propName] = { [propertyMapper.xmlElementName]: value }; - } - else { - parentObject[propName] = value; - } - } + try { + const response = await tracingClient.withContext(tracingContext, next, request); + tryProcessResponse(span, response); + return response; } - } - const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); - if (additionalPropertiesMapper) { - const propNames = Object.keys(modelProps); - for (const clientPropName in object) { - const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); - if (isAdditionalProperty) { - payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); - } + catch (err) { + tryProcessError(span, err); + throw err; } - } - return payload; - } - return object; + }, + }; } -function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { - if (!isXml || !propertyMapper.xmlNamespace) { - return serializedValue; +function tryCreateTracingClient() { + try { + return coreTracing.createTracingClient({ + namespace: "", + packageName: "@azure/core-rest-pipeline", + packageVersion: SDK_VERSION, + }); } - const xmlnsKey = propertyMapper.xmlNamespacePrefix - ? `xmlns:${propertyMapper.xmlNamespacePrefix}` - : "xmlns"; - const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; - if (["Composite"].includes(propertyMapper.type.name)) { - if (serializedValue[XML_ATTRKEY]) { - return serializedValue; - } - else { - const result = Object.assign({}, serializedValue); - result[XML_ATTRKEY] = xmlNamespace; - return result; - } + catch (e) { + logger.warning(`Error when creating the TracingClient: ${coreUtil.getErrorMessage(e)}`); + return undefined; } - const result = {}; - result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = xmlNamespace; - return result; } -function isSpecialXmlProperty(propertyName, options) { - return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); -} -function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { - var _a, _b; - const xmlCharKey = (_a = options.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); - } - const modelProps = resolveModelProperties(serializer, mapper, objectName); - let instance = {}; - const handledPropertyNames = []; - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - const paths = splitSerializeName(modelProps[key].serializedName); - handledPropertyNames.push(paths[0]); - const { serializedName, xmlName, xmlElementName } = propertyMapper; - let propertyObjectName = objectName; - if (serializedName !== "" && serializedName !== undefined) { - propertyObjectName = objectName + "." 
+ serializedName; - } - const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; - if (headerCollectionPrefix) { - const dictionary = {}; - for (const headerKey of Object.keys(responseBody)) { - if (headerKey.startsWith(headerCollectionPrefix)) { - dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); - } - handledPropertyNames.push(headerKey); - } - instance[key] = dictionary; - } - else if (serializer.isXML) { - if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { - instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); - } - else if (propertyMapper.xmlIsMsText) { - if (responseBody[xmlCharKey] !== undefined) { - instance[key] = responseBody[xmlCharKey]; - } - else if (typeof responseBody === "string") { - // The special case where xml parser parses "content" into JSON of - // `{ name: "content"}` instead of `{ name: { "_": "content" }}` - instance[key] = responseBody; - } - } - else { - const propertyName = xmlElementName || xmlName || serializedName; - if (propertyMapper.xmlIsWrapped) { - /* a list of wrapped by - For the xml example below - - ... - ... - - the responseBody has - { - Cors: { - CorsRule: [{...}, {...}] - } - } - xmlName is "Cors" and xmlElementName is"CorsRule". - */ - const wrapped = responseBody[xmlName]; - const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; - instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); - handledPropertyNames.push(xmlName); - } - else { - const property = responseBody[propertyName]; - instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); - handledPropertyNames.push(propertyName); - } - } - } - else { - // deserialize the property if it is present in the provided responseBody instance - let propertyInstance; - let res = responseBody; - // traversing the object step by step. - for (const item of paths) { - if (!res) - break; - res = res[item]; - } - propertyInstance = res; - const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; - // checking that the model property name (key)(ex: "fishtype") and the - // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") - // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") - // is a better approach. The generator is not consistent with escaping '\.' in the - // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator - // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, - // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and - // the transformation of model property name (ex: "fishtype") is done consistently. - // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
- if (polymorphicDiscriminator && - key === polymorphicDiscriminator.clientName && - propertyInstance == undefined) { - propertyInstance = mapper.serializedName; - } - let serializedValue; - // paging - if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { - propertyInstance = responseBody[key]; - const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - // Copy over any properties that have already been added into the instance, where they do - // not exist on the newly de-serialized array - for (const [k, v] of Object.entries(instance)) { - if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { - arrayInstance[k] = v; - } - } - instance = arrayInstance; - } - else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { - serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - instance[key] = serializedValue; - } +function tryCreateSpan(tracingClient, request, userAgent) { + try { + // As per spec, we do not need to differentiate between HTTP and HTTPS in span name. + const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { + spanKind: "client", + spanAttributes: { + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; } - } - const additionalPropertiesMapper = mapper.type.additionalProperties; - if (additionalPropertiesMapper) { - const isAdditionalProperty = (responsePropName) => { - for (const clientPropName in modelProps) { - const paths = splitSerializeName(modelProps[clientPropName].serializedName); - if (paths[0] === responsePropName) { - return false; - } - } - return true; - }; - for (const responsePropName in responseBody) { - if (isAdditionalProperty(responsePropName)) { - instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); - } + if (userAgent) { + span.setAttribute("http.user_agent", userAgent); } - } - else if (responseBody) { - for (const key of Object.keys(responseBody)) { - if (instance[key] === undefined && - !handledPropertyNames.includes(key) && - !isSpecialXmlProperty(key, options)) { - instance[key] = responseBody[key]; - } + // set headers + const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); + for (const [key, value] of Object.entries(headers)) { + request.headers.set(key, value); } + return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; } - return instance; -} -function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { - const value = mapper.type.value; - if (!value || typeof value !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); - } - if (responseBody) { - const tempDictionary = {}; - for (const key of Object.keys(responseBody)) { - tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); - } - return tempDictionary; + catch (e) { + logger.warning(`Skipping creating a tracing span due to an error: ${coreUtil.getErrorMessage(e)}`); + return undefined; } - return responseBody; } -function 
deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { - const element = mapper.type.element; - if (!element || typeof element !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); - } - if (responseBody) { - if (!Array.isArray(responseBody)) { - // xml2js will interpret a single element array as just the element, so force it to be an array - responseBody = [responseBody]; - } - const tempArray = []; - for (let i = 0; i < responseBody.length; i++) { - tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); +function tryProcessError(span, error) { + try { + span.setStatus({ + status: "error", + error: coreUtil.isError(error) ? error : undefined, + }); + if (isRestError(error) && error.statusCode) { + span.setAttribute("http.status_code", error.statusCode); } - return tempArray; + span.end(); } - return responseBody; -} -function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator) { - const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; - if (discriminatorName != undefined) { - const discriminatorValue = object[discriminatorName]; - if (discriminatorValue != undefined) { - const typeName = mapper.type.uberParent || mapper.type.className; - const indexDiscriminator = discriminatorValue === typeName - ? discriminatorValue - : typeName + "." + discriminatorValue; - const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; - if (polymorphicMapper) { - mapper = polymorphicMapper; - } - } - } + catch (e) { + logger.warning(`Skipping tracing span processing due to an error: ${coreUtil.getErrorMessage(e)}`); } - return mapper; -} -function getPolymorphicDiscriminatorRecursively(serializer, mapper) { - return (mapper.type.polymorphicDiscriminator || - getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || - getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); -} -function getPolymorphicDiscriminatorSafely(serializer, typeName) { - return (typeName && - serializer.modelMappers[typeName] && - serializer.modelMappers[typeName].type.polymorphicDiscriminator); } -/** - * Utility function that serializes an object that might contain binary information into a plain object, array or a string. 
- */ -function serializeObject(toSerialize) { - const castToSerialize = toSerialize; - if (toSerialize == undefined) - return undefined; - if (toSerialize instanceof Uint8Array) { - toSerialize = encodeByteArray(toSerialize); - return toSerialize; - } - else if (toSerialize instanceof Date) { - return toSerialize.toISOString(); - } - else if (Array.isArray(toSerialize)) { - const array = []; - for (let i = 0; i < toSerialize.length; i++) { - array.push(serializeObject(toSerialize[i])); - } - return array; - } - else if (typeof toSerialize === "object") { - const dictionary = {}; - for (const property in toSerialize) { - dictionary[property] = serializeObject(castToSerialize[property]); +function tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); } - return dictionary; + span.setStatus({ + status: "success", + }); + span.end(); } - return toSerialize; -} -/** - * Utility function to create a K:V from a list of strings - */ -function strEnum(o) { - const result = {}; - for (const key of o) { - result[key] = key; + catch (e) { + logger.warning(`Skipping tracing span processing due to an error: ${coreUtil.getErrorMessage(e)}`); } - return result; } -/** - * String enum containing the string types of property mappers. - */ -// eslint-disable-next-line @typescript-eslint/no-redeclare -const MapperType = strEnum([ - "Base64Url", - "Boolean", - "ByteArray", - "Composite", - "Date", - "DateTime", - "DateTimeRfc1123", - "Dictionary", - "Enum", - "Number", - "Object", - "Sequence", - "String", - "Stream", - "TimeSpan", - "UnixTime", -]); // Copyright (c) Microsoft Corporation. -function isWebResourceLike(object) { - if (object && typeof object === "object") { - const castObject = object; - if (typeof castObject.url === "string" && - typeof castObject.method === "string" && - typeof castObject.headers === "object" && - isHttpHeadersLike(castObject.headers) && - typeof castObject.validateRequestProperties === "function" && - typeof castObject.prepare === "function" && - typeof castObject.clone === "function") { - return true; - } - } - return false; +// Licensed under the MIT license. +function isNodeReadableStream(x) { + return Boolean(x && typeof x["pipe"] === "function"); } -/** - * Creates a new WebResource object. - * - * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary - * properties to initiate a request. - */ -class WebResource { - constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { - this.streamResponseBody = streamResponseBody; - this.streamResponseStatusCodes = streamResponseStatusCodes; - this.url = url || ""; - this.method = method || "GET"; - this.headers = isHttpHeadersLike(headers) ? 
headers : new HttpHeaders(headers); - this.body = body; - this.query = query; - this.formData = undefined; - this.withCredentials = withCredentials || false; - this.abortSignal = abortSignal; - this.timeout = timeout || 0; - this.onUploadProgress = onUploadProgress; - this.onDownloadProgress = onDownloadProgress; - this.proxySettings = proxySettings; - this.keepAlive = keepAlive; - this.decompressResponse = decompressResponse; - this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); - } - /** - * Validates that the required properties such as method, url, headers["Content-Type"], - * headers["accept-language"] are defined. It will throw an error if one of the above - * mentioned properties are not defined. - */ - validateRequestProperties() { - if (!this.method) { - throw new Error("WebResource.method is required."); - } - if (!this.url) { - throw new Error("WebResource.url is required."); - } - } - /** - * Prepares the request. - * @param options - Options to provide for preparing the request. - * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. - */ - prepare(options) { - if (!options) { - throw new Error("options object is required"); - } - if (options.method === undefined || - options.method === null || - typeof options.method.valueOf() !== "string") { - throw new Error("options.method must be a string."); - } - if (options.url && options.pathTemplate) { - throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); - } - if ((options.pathTemplate === undefined || - options.pathTemplate === null || - typeof options.pathTemplate.valueOf() !== "string") && - (options.url === undefined || - options.url === null || - typeof options.url.valueOf() !== "string")) { - throw new Error("Please provide exactly one of options.pathTemplate or options.url."); - } - // set the url if it is provided. - if (options.url) { - if (typeof options.url !== "string") { - throw new Error('options.url must be of type "string".'); - } - this.url = options.url; - } - // set the method - if (options.method) { - const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; - if (validMethods.indexOf(options.method.toUpperCase()) === -1) { - throw new Error('The provided method "' + - options.method + - '" is invalid. Supported HTTP methods are: ' + - JSON.stringify(validMethods)); - } - } - this.method = options.method.toUpperCase(); - // construct the url if path template is provided - if (options.pathTemplate) { - const { pathTemplate, pathParameters } = options; - if (typeof pathTemplate !== "string") { - throw new Error('options.pathTemplate must be of type "string".'); - } - if (!options.baseUrl) { - options.baseUrl = "https://management.azure.com"; - } - const baseUrl = options.baseUrl; - let url = baseUrl + - (baseUrl.endsWith("/") ? "" : "/") + - (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); - const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); - if (segments && segments.length) { - if (!pathParameters) { - throw new Error(`pathTemplate: ${pathTemplate} has been provided. 
Hence, options.pathParameters must also be provided.`); - } - segments.forEach(function (item) { - const pathParamName = item.slice(1, -1); - const pathParam = pathParameters[pathParamName]; - if (pathParam === null || - pathParam === undefined || - !(typeof pathParam === "string" || typeof pathParam === "object")) { - const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); - throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + - ` however, it is not present in parameters: ${stringifiedPathParameters}.` + - `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + - `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); - } - if (typeof pathParam.valueOf() === "string") { - url = url.replace(item, encodeURIComponent(pathParam)); - } - if (typeof pathParam.valueOf() === "object") { - if (!pathParam.value) { - throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); - } - if (pathParam.skipUrlEncoding) { - url = url.replace(item, pathParam.value); - } - else { - url = url.replace(item, encodeURIComponent(pathParam.value)); - } - } - }); - } - this.url = url; - } - // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. - if (options.queryParameters) { - const queryParameters = options.queryParameters; - if (typeof queryParameters !== "object") { - throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + - `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + - `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); - } - // append question mark if it is not present in the url - if (this.url && this.url.indexOf("?") === -1) { - this.url += "?"; - } - // construct queryString - const queryParams = []; - // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
- this.query = {}; - for (const queryParamName in queryParameters) { - const queryParam = queryParameters[queryParamName]; - if (queryParam) { - if (typeof queryParam === "string") { - queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); - this.query[queryParamName] = encodeURIComponent(queryParam); - } - else if (typeof queryParam === "object") { - if (!queryParam.value) { - throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); - } - if (queryParam.skipUrlEncoding) { - queryParams.push(queryParamName + "=" + queryParam.value); - this.query[queryParamName] = queryParam.value; - } - else { - queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); - this.query[queryParamName] = encodeURIComponent(queryParam.value); - } - } - } - } // end-of-for - // append the queryString - this.url += queryParams.join("&"); - } - // add headers to the request if they are provided - if (options.headers) { - const headers = options.headers; - for (const headerName of Object.keys(options.headers)) { - this.headers.set(headerName, headers[headerName]); - } - } - // ensure accept-language is set correctly - if (!this.headers.get("accept-language")) { - this.headers.set("accept-language", "en-US"); - } - // ensure the request-id is set correctly - if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { - this.headers.set("x-ms-client-request-id", this.requestId); - } - // default - if (!this.headers.get("Content-Type")) { - this.headers.set("Content-Type", "application/json; charset=utf-8"); - } - // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly - this.body = options.body; - if (options.body !== undefined && options.body !== null) { - // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. - if (options.bodyIsStream) { - if (!this.headers.get("Transfer-Encoding")) { - this.headers.set("Transfer-Encoding", "chunked"); - } - if (this.headers.get("Content-Type") !== "application/octet-stream") { - this.headers.set("Content-Type", "application/octet-stream"); - } - } - else { - if (options.serializationMapper) { - this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); - } - if (!options.disableJsonStringifyOnBody) { - this.body = JSON.stringify(options.body); - } - } - } - if (options.spanOptions) { - this.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - this.tracingContext = options.tracingContext; - } - this.abortSignal = options.abortSignal; - this.onDownloadProgress = options.onDownloadProgress; - this.onUploadProgress = options.onUploadProgress; - return this; - } - /** - * Clone this WebResource HTTP request object. - * @returns The clone of this WebResource HTTP request object. 
- */ - clone() { - const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); - if (this.formData) { - result.formData = this.formData; - } - if (this.operationSpec) { - result.operationSpec = this.operationSpec; - } - if (this.shouldDeserialize) { - result.shouldDeserialize = this.shouldDeserialize; - } - if (this.operationResponseGetter) { - result.operationResponseGetter = this.operationResponseGetter; - } - return result; - } +function isWebReadableStream(x) { + return Boolean(x && + typeof x.getReader === "function" && + typeof x.tee === "function"); +} +function isBlob(x) { + return typeof x.stream === "function"; } // Copyright (c) Microsoft Corporation. -/** - * A class that handles the query portion of a URLBuilder. - */ -class URLQuery { - constructor() { - this._rawQuery = {}; - } - /** - * Get whether or not there any query parameters in this URLQuery. - */ - any() { - return Object.keys(this._rawQuery).length > 0; - } - /** - * Get the keys of the query string. - */ - keys() { - return Object.keys(this._rawQuery); - } - /** - * Set a query parameter with the provided name and value. If the parameterValue is undefined or - * empty, then this will attempt to remove an existing query parameter with the provided - * parameterName. - */ - set(parameterName, parameterValue) { - const caseParameterValue = parameterValue; - if (parameterName) { - if (caseParameterValue !== undefined && caseParameterValue !== null) { - const newValue = Array.isArray(caseParameterValue) - ? caseParameterValue - : caseParameterValue.toString(); - this._rawQuery[parameterName] = newValue; - } - else { - delete this._rawQuery[parameterName]; - } - } - } - /** - * Get the value of the query parameter with the provided name. If no parameter exists with the - * provided parameter name, then undefined will be returned. - */ - get(parameterName) { - return parameterName ? this._rawQuery[parameterName] : undefined; - } - /** - * Get the string representation of this query. The return value will not start with a "?". - */ - toString() { - let result = ""; - for (const parameterName in this._rawQuery) { - if (result) { - result += "&"; - } - const parameterValue = this._rawQuery[parameterName]; - if (Array.isArray(parameterValue)) { - const parameterStrings = []; - for (const parameterValueElement of parameterValue) { - parameterStrings.push(`${parameterName}=${parameterValueElement}`); +// Licensed under the MIT license. +function streamAsyncIterator() { + return tslib.__asyncGenerator(this, arguments, function* streamAsyncIterator_1() { + const reader = this.getReader(); + try { + while (true) { + const { done, value } = yield tslib.__await(reader.read()); + if (done) { + return yield tslib.__await(void 0); } - result += parameterStrings.join("&"); - } - else { - result += `${parameterName}=${parameterValue}`; + yield yield tslib.__await(value); } } - return result; - } - /** - * Parse a URLQuery from the provided text. 
- */ - static parse(text) { - const result = new URLQuery(); - if (text) { - if (text.startsWith("?")) { - text = text.substring(1); - } - let currentState = "ParameterName"; - let parameterName = ""; - let parameterValue = ""; - for (let i = 0; i < text.length; ++i) { - const currentCharacter = text[i]; - switch (currentState) { - case "ParameterName": - switch (currentCharacter) { - case "=": - currentState = "ParameterValue"; - break; - case "&": - parameterName = ""; - parameterValue = ""; - break; - default: - parameterName += currentCharacter; - break; - } - break; - case "ParameterValue": - switch (currentCharacter) { - case "&": - result.set(parameterName, parameterValue); - parameterName = ""; - parameterValue = ""; - currentState = "ParameterName"; - break; - default: - parameterValue += currentCharacter; - break; - } - break; - default: - throw new Error("Unrecognized URLQuery parse state: " + currentState); - } - } - if (currentState === "ParameterValue") { - result.set(parameterName, parameterValue); - } + finally { + reader.releaseLock(); } - return result; - } + }); } -/** - * A class that handles creating, modifying, and parsing URLs. - */ -class URLBuilder { - /** - * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL - * (such as a host, port, path, or query), those parts will be added to this URL as well. - */ - setScheme(scheme) { - if (!scheme) { - this._scheme = undefined; - } - else { - this.set(scheme, "SCHEME"); - } - } - /** - * Get the scheme that has been set in this URL. - */ - getScheme() { - return this._scheme; - } - /** - * Set the host for this URL. If the provided host contains other parts of a URL (such as a - * port, path, or query), those parts will be added to this URL as well. - */ - setHost(host) { - if (!host) { - this._host = undefined; - } - else { - this.set(host, "SCHEME_OR_HOST"); - } - } - /** - * Get the host that has been set in this URL. - */ - getHost() { - return this._host; - } - /** - * Set the port for this URL. If the provided port contains other parts of a URL (such as a - * path or query), those parts will be added to this URL as well. - */ - setPort(port) { - if (port === undefined || port === null || port === "") { - this._port = undefined; - } - else { - this.set(port.toString(), "PORT"); - } - } - /** - * Get the port that has been set in this URL. - */ - getPort() { - return this._port; - } - /** - * Set the path for this URL. If the provided path contains a query, then it will be added to - * this URL as well. - */ - setPath(path) { - if (!path) { - this._path = undefined; - } - else { - const schemeIndex = path.indexOf("://"); - if (schemeIndex !== -1) { - const schemeStart = path.lastIndexOf("/", schemeIndex); - // Make sure to only grab the URL part of the path before setting the state back to SCHEME - // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" - this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); - } - else { - this.set(path, "PATH"); - } - } - } - /** - * Append the provided path to this URL's existing path. If the provided path contains a query, - * then it will be added to this URL as well. 
- */ - appendPath(path) { - if (path) { - let currentPath = this.getPath(); - if (currentPath) { - if (!currentPath.endsWith("/")) { - currentPath += "/"; - } - if (path.startsWith("/")) { - path = path.substring(1); - } - path = currentPath + path; - } - this.set(path, "PATH"); - } - } - /** - * Get the path that has been set in this URL. - */ - getPath() { - return this._path; - } - /** - * Set the query in this URL. - */ - setQuery(query) { - if (!query) { - this._query = undefined; - } - else { - this._query = URLQuery.parse(query); - } - } - /** - * Set a query parameter with the provided name and value in this URL's query. If the provided - * query parameter value is undefined or empty, then the query parameter will be removed if it - * existed. - */ - setQueryParameter(queryParameterName, queryParameterValue) { - if (queryParameterName) { - if (!this._query) { - this._query = new URLQuery(); - } - this._query.set(queryParameterName, queryParameterValue); - } - } - /** - * Get the value of the query parameter with the provided query parameter name. If no query - * parameter exists with the provided name, then undefined will be returned. - */ - getQueryParameterValue(queryParameterName) { - return this._query ? this._query.get(queryParameterName) : undefined; - } - /** - * Get the query in this URL. - */ - getQuery() { - return this._query ? this._query.toString() : undefined; - } - /** - * Set the parts of this URL by parsing the provided text using the provided startState. - */ - set(text, startState) { - const tokenizer = new URLTokenizer(text, startState); - while (tokenizer.next()) { - const token = tokenizer.current(); - let tokenPath; - if (token) { - switch (token.type) { - case "SCHEME": - this._scheme = token.text || undefined; - break; - case "HOST": - this._host = token.text || undefined; - break; - case "PORT": - this._port = token.text || undefined; - break; - case "PATH": - tokenPath = token.text || undefined; - if (!this._path || this._path === "/" || tokenPath !== "/") { - this._path = tokenPath; - } - break; - case "QUERY": - this._query = URLQuery.parse(token.text); - break; - default: - throw new Error(`Unrecognized URLTokenType: ${token.type}`); - } - } - } - } - /** - * Serializes the URL as a string. - * @returns the URL as a string. - */ - toString() { - let result = ""; - if (this._scheme) { - result += `${this._scheme}://`; - } - if (this._host) { - result += this._host; - } - if (this._port) { - result += `:${this._port}`; - } - if (this._path) { - if (!this._path.startsWith("/")) { - result += "/"; - } - result += this._path; - } - if (this._query && this._query.any()) { - result += `?${this._query.toString()}`; - } - return result; - } - /** - * If the provided searchValue is found in this URLBuilder, then replace it with the provided - * replaceValue. - */ - replaceAll(searchValue, replaceValue) { - if (searchValue) { - this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); - this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); - this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); - this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); - this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); - } +function makeAsyncIterable(webStream) { + if (!webStream[Symbol.asyncIterator]) { + webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); } - /** - * Parses a given string URL into a new {@link URLBuilder}. 
- */ - static parse(text) { - const result = new URLBuilder(); - result.set(text, "SCHEME_OR_HOST"); - return result; + if (!webStream.values) { + webStream.values = streamAsyncIterator.bind(webStream); } } -class URLToken { - constructor(text, type) { - this.text = text; - this.type = type; - } - static scheme(text) { - return new URLToken(text, "SCHEME"); - } - static host(text) { - return new URLToken(text, "HOST"); - } - static port(text) { - return new URLToken(text, "PORT"); - } - static path(text) { - return new URLToken(text, "PATH"); - } - static query(text) { - return new URLToken(text, "QUERY"); - } +function nodeStreamFromWebStream(webStream) { + makeAsyncIterable(webStream); + return stream.Readable.fromWeb(webStream); } -/** - * Get whether or not the provided character (single character string) is an alphanumeric (letter or - * digit) character. - */ -function isAlphaNumericCharacter(character) { - const characterCode = character.charCodeAt(0); - return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || - (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || - (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +function toWebStream(stream$1) { + return isWebReadableStream(stream$1) + ? stream$1 + : stream.Readable.toWeb(stream.Readable.from(stream$1)); } -/** - * A class that tokenizes URL strings. - */ -class URLTokenizer { - constructor(_text, state) { - this._text = _text; - this._textLength = _text ? _text.length : 0; - this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; - this._currentIndex = 0; +function toStream(source) { + if (source instanceof Uint8Array) { + return stream.Readable.from(Buffer.from(source)); } - /** - * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer - * hasn't started or has finished tokenizing. - */ - current() { - return this._currentToken; + else if (isBlob(source)) { + return nodeStreamFromWebStream(source.stream()); } - /** - * Advance to the next URLToken and return whether or not a URLToken was found. - */ - next() { - if (!hasCurrentCharacter(this)) { - this._currentToken = undefined; - } - else { - switch (this._currentState) { - case "SCHEME": - nextScheme(this); - break; - case "SCHEME_OR_HOST": - nextSchemeOrHost(this); - break; - case "HOST": - nextHost(this); - break; - case "PORT": - nextPort(this); - break; - case "PATH": - nextPath(this); - break; - case "QUERY": - nextQuery(this); - break; - default: - throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); - } - } - return !!this._currentToken; + else if (isNodeReadableStream(source)) { + return source; } -} -/** - * Read the remaining characters from this Tokenizer's character stream. - */ -function readRemaining(tokenizer) { - let result = ""; - if (tokenizer._currentIndex < tokenizer._textLength) { - result = tokenizer._text.substring(tokenizer._currentIndex); - tokenizer._currentIndex = tokenizer._textLength; + else { + return nodeStreamFromWebStream(source); } - return result; } -/** - * Whether or not this URLTokenizer has a current character. - */ -function hasCurrentCharacter(tokenizer) { - return tokenizer._currentIndex < tokenizer._textLength; -} -/** - * Get the character in the text string at the current index. - */ -function getCurrentCharacter(tokenizer) { - return tokenizer._text[tokenizer._currentIndex]; -} -/** - * Advance to the character in text that is "step" characters ahead. 
If no step value is provided, - * then step will default to 1. - */ -function nextCharacter(tokenizer, step) { - if (hasCurrentCharacter(tokenizer)) { - if (!step) { - step = 1; - } - tokenizer._currentIndex += step; +function concatenateStreams(sources) { + if (sources.some(isWebReadableStream)) { + throw new Error("Was not expecting a Web stream here"); } + return stream.Readable.from((function () { + return tslib.__asyncGenerator(this, arguments, function* () { + var _a, e_1, _b, _c; + for (const stream of sources) { + try { + for (var _d = true, stream_1 = (e_1 = void 0, tslib.__asyncValues(stream)), stream_1_1; stream_1_1 = yield tslib.__await(stream_1.next()), _a = stream_1_1.done, !_a; _d = true) { + _c = stream_1_1.value; + _d = false; + const chunk = _c; + yield yield tslib.__await(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = stream_1.return)) yield tslib.__await(_b.call(stream_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); + })()); } -/** - * Starting with the current character, peek "charactersToPeek" number of characters ahead in this - * Tokenizer's stream of characters. - */ -function peekCharacters(tokenizer, charactersToPeek) { - let endIndex = tokenizer._currentIndex + charactersToPeek; - if (tokenizer._textLength < endIndex) { - endIndex = tokenizer._textLength; - } - return tokenizer._text.substring(tokenizer._currentIndex, endIndex); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function generateBoundary() { + return `----AzSDKFormBoundary${coreUtil.randomUUID()}`; } -/** - * Read characters from this Tokenizer until the end of the stream or until the provided condition - * is false when provided the current character. - */ -function readWhile(tokenizer, condition) { +function encodeHeaders(headers) { let result = ""; - while (hasCurrentCharacter(tokenizer)) { - const currentCharacter = getCurrentCharacter(tokenizer); - if (!condition(currentCharacter)) { - break; - } - else { - result += currentCharacter; - nextCharacter(tokenizer); - } + for (const [key, value] of headers) { + result += `${key}: ${value}\r\n`; } return result; } -/** - * Read characters from this Tokenizer until a non-alphanumeric character or the end of the - * character stream is reached. - */ -function readWhileLetterOrDigit(tokenizer) { - return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); -} -/** - * Read characters from this Tokenizer until one of the provided terminating characters is read or - * the end of the character stream is reached. - */ -function readUntilCharacter(tokenizer, ...terminatingCharacters) { - return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); -} -function nextScheme(tokenizer) { - const scheme = readWhileLetterOrDigit(tokenizer); - tokenizer._currentToken = URLToken.scheme(scheme); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; +function getLength(source) { + if (source instanceof Uint8Array) { + return source.byteLength; + } + else if (isBlob(source)) { + // if was created using createFile then -1 means we have an unknown size + return source.size === -1 ? 
undefined : source.size; } else { - tokenizer._currentState = "HOST"; + return undefined; } } -function nextSchemeOrHost(tokenizer) { - const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentToken = URLToken.host(schemeOrHost); - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === ":") { - if (peekCharacters(tokenizer, 3) === "://") { - tokenizer._currentToken = URLToken.scheme(schemeOrHost); - tokenizer._currentState = "HOST"; - } - else { - tokenizer._currentToken = URLToken.host(schemeOrHost); - tokenizer._currentState = "PORT"; - } - } - else { - tokenizer._currentToken = URLToken.host(schemeOrHost); - if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; +function getTotalLength(sources) { + let total = 0; + for (const source of sources) { + const partLength = getLength(source); + if (partLength === undefined) { + return undefined; } else { - tokenizer._currentState = "QUERY"; + total += partLength; } } + return total; } -function nextHost(tokenizer) { - if (peekCharacters(tokenizer, 3) === "://") { - nextCharacter(tokenizer, 3); - } - const host = readUntilCharacter(tokenizer, ":", "/", "?"); - tokenizer._currentToken = URLToken.host(host); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === ":") { - tokenizer._currentState = "PORT"; - } - else if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextPort(tokenizer) { - if (getCurrentCharacter(tokenizer) === ":") { - nextCharacter(tokenizer); - } - const port = readUntilCharacter(tokenizer, "/", "?"); - tokenizer._currentToken = URLToken.port(port); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextPath(tokenizer) { - const path = readUntilCharacter(tokenizer, "?"); - tokenizer._currentToken = URLToken.path(path); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextQuery(tokenizer) { - if (getCurrentCharacter(tokenizer) === "?") { - nextCharacter(tokenizer); - } - const query = readRemaining(tokenizer); - tokenizer._currentToken = URLToken.query(query); - tokenizer._currentState = "DONE"; -} - -// Copyright (c) Microsoft Corporation. 
-function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); - if (!host) { - throw new Error("Expecting a non-empty host in proxy settings."); - } - if (!isValidPort(proxySettings.port)) { - throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); - } - const tunnelOptions = { - proxy: { - host: host, - port: proxySettings.port, - headers: (headers && headers.rawHeaders()) || {}, - }, - }; - if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; - } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; +function buildRequestBody(request, parts, boundary) { + const sources = [ + coreUtil.stringToUint8Array(`--${boundary}`, "utf-8"), + ...parts.flatMap((part) => [ + coreUtil.stringToUint8Array("\r\n", "utf-8"), + coreUtil.stringToUint8Array(encodeHeaders(part.headers), "utf-8"), + coreUtil.stringToUint8Array("\r\n", "utf-8"), + part.body, + coreUtil.stringToUint8Array(`\r\n--${boundary}`, "utf-8"), + ]), + coreUtil.stringToUint8Array("--\r\n\r\n", "utf-8"), + ]; + const contentLength = getTotalLength(sources); + if (contentLength) { + request.headers.set("Content-Length", contentLength); } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { - isHttps: isRequestHttps, - agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), - }; - return proxyAgent; -} -function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; - return urlScheme.toLowerCase() === "https"; + request.body = (() => concatenateStreams(sources.map((source) => (typeof source === "function" ? source() : source)).map(toStream))); } -function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { - if (isRequestHttps && isProxyHttps) { - return tunnel__namespace.httpsOverHttps(tunnelOptions); - } - else if (isRequestHttps && !isProxyHttps) { - return tunnel__namespace.httpsOverHttp(tunnelOptions); - } - else if (!isRequestHttps && isProxyHttps) { - return tunnel__namespace.httpOverHttps(tunnelOptions); +/** + * Name of multipart policy + */ +const multipartPolicyName = "multipartPolicy"; +const maxBoundaryLength = 70; +const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); +function assertValidBoundary(boundary) { + if (boundary.length > maxBoundaryLength) { + throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); } - else { - return tunnel__namespace.httpOverHttp(tunnelOptions); + if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { + throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); } } -function isValidPort(port) { - // any port in 0-65535 range is valid (RFC 793) even though almost all implementations - // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports - return 0 <= port && port <= 65535; -} - -// Copyright (c) Microsoft Corporation. 
-const RedactedString = "REDACTED"; -const defaultAllowedHeaderNames = [ - "x-ms-client-request-id", - "x-ms-return-client-request-id", - "x-ms-useragent", - "x-ms-correlation-request-id", - "x-ms-request-id", - "client-request-id", - "ms-cv", - "return-client-request-id", - "traceparent", - "Access-Control-Allow-Credentials", - "Access-Control-Allow-Headers", - "Access-Control-Allow-Methods", - "Access-Control-Allow-Origin", - "Access-Control-Expose-Headers", - "Access-Control-Max-Age", - "Access-Control-Request-Headers", - "Access-Control-Request-Method", - "Origin", - "Accept", - "Accept-Encoding", - "Cache-Control", - "Connection", - "Content-Length", - "Content-Type", - "Date", - "ETag", - "Expires", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "Last-Modified", - "Pragma", - "Request-Id", - "Retry-After", - "Server", - "Transfer-Encoding", - "User-Agent", - "WWW-Authenticate", -]; -const defaultAllowedQueryParameters = ["api-version"]; -class Sanitizer { - constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - allowedHeaderNames = Array.isArray(allowedHeaderNames) - ? defaultAllowedHeaderNames.concat(allowedHeaderNames) - : defaultAllowedHeaderNames; - allowedQueryParameters = Array.isArray(allowedQueryParameters) - ? defaultAllowedQueryParameters.concat(allowedQueryParameters) - : defaultAllowedQueryParameters; - this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); - this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); - } - sanitize(obj) { - const seen = new Set(); - return JSON.stringify(obj, (key, value) => { - // Ensure Errors include their interesting non-enumerable members - if (value instanceof Error) { - return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); - } - if (key === "_headersMap") { - return this.sanitizeHeaders(value); - } - else if (key === "url") { - return this.sanitizeUrl(value); - } - else if (key === "query") { - return this.sanitizeQuery(value); - } - else if (key === "body") { - // Don't log the request body - return undefined; +/** + * Pipeline policy for multipart requests + */ +function multipartPolicy() { + return { + name: multipartPolicyName, + sendRequest(request, next) { + var _a; + if (!request.multipartBody) { + return next(request); } - else if (key === "response") { - // Don't log response again - return undefined; + if (request.body) { + throw new Error("multipartBody and regular body cannot be set at the same time"); } - else if (key === "operationSpec") { - // When using sendOperationRequest, the request carries a massive - // field with the autorest spec. No need to log it. - return undefined; + let boundary = request.multipartBody.boundary; + const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? 
_a : "multipart/mixed"; + const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); + if (!parsedHeader) { + throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); } - else if (Array.isArray(value) || isObject(value)) { - if (seen.has(value)) { - return "[Circular]"; - } - seen.add(value); + const [, contentType, parsedBoundary] = parsedHeader; + if (parsedBoundary && boundary && parsedBoundary !== boundary) { + throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); } - return value; - }, 2); - } - sanitizeHeaders(value) { - return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); - } - sanitizeQuery(value) { - return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); - } - sanitizeObject(value, allowedKeys, accessor) { - if (typeof value !== "object" || value === null) { - return value; - } - const sanitized = {}; - for (const k of Object.keys(value)) { - if (allowedKeys.has(k.toLowerCase())) { - sanitized[k] = accessor(value, k); + boundary !== null && boundary !== void 0 ? boundary : (boundary = parsedBoundary); + if (boundary) { + assertValidBoundary(boundary); } else { - sanitized[k] = RedactedString; - } - } - return sanitized; - } - sanitizeUrl(value) { - if (typeof value !== "string" || value === null) { - return value; - } - const urlBuilder = URLBuilder.parse(value); - const queryString = urlBuilder.getQuery(); - if (!queryString) { - return value; - } - const query = URLQuery.parse(queryString); - for (const k of query.keys()) { - if (!this.allowedQueryParameters.has(k.toLowerCase())) { - query.set(k, RedactedString); + boundary = generateBoundary(); } - } - urlBuilder.setQuery(query.toString()); - return urlBuilder.toString(); - } + request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); + buildRequestBody(request, request.multipartBody.parts, boundary); + request.multipartBody = undefined; + return next(request); + }, + }; } // Copyright (c) Microsoft Corporation. -const custom = util.inspect.custom; - -// Copyright (c) Microsoft Corporation. -const errorSanitizer = new Sanitizer(); +// Licensed under the MIT license. /** - * An error resulting from an HTTP request to a service endpoint. + * Create a new pipeline with a default set of customizable policies. + * @param options - Options to configure a custom pipeline. */ -class RestError extends Error { - constructor(message, code, statusCode, request, response) { - super(message); - this.name = "RestError"; - this.code = code; - this.statusCode = statusCode; - this.request = request; - this.response = response; - Object.setPrototypeOf(this, RestError.prototype); +function createPipelineFromOptions(options) { + var _a; + const pipeline = createEmptyPipeline(); + if (coreUtil.isNode) { + if (options.tlsOptions) { + pipeline.addPolicy(tlsPolicy(options.tlsOptions)); + } + pipeline.addPolicy(proxyPolicy(options.proxyOptions)); + pipeline.addPolicy(decompressResponsePolicy()); } - /** - * Logging method for util.inspect in Node - */ - [custom]() { - return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + pipeline.addPolicy(formDataPolicy()); + pipeline.addPolicy(userAgentPolicy(options.userAgentOptions)); + pipeline.addPolicy(setClientRequestIdPolicy((_a = options.telemetryOptions) === null || _a === void 0 ? 
void 0 : _a.clientRequestIdHeaderName)); + // The multipart policy is added after policies with no phase, so that + // policies can be added between it and formDataPolicy to modify + // properties (e.g., making the boundary constant in recorded tests). + pipeline.addPolicy(multipartPolicy(), { afterPhase: "Deserialize" }); + pipeline.addPolicy(defaultRetryPolicy(options.retryOptions), { phase: "Retry" }); + pipeline.addPolicy(tracingPolicy(options.userAgentOptions), { afterPhase: "Retry" }); + if (coreUtil.isNode) { + // Both XHR and Fetch expect to handle redirects automatically, + // so only include this policy when we're in Node. + pipeline.addPolicy(redirectPolicy(options.redirectOptions), { afterPhase: "Retry" }); } + pipeline.addPolicy(logPolicy(options.loggingOptions), { afterPhase: "Sign" }); + return pipeline; } -/** - * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) - */ -RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; -/** - * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. - */ -RestError.PARSE_ERROR = "PARSE_ERROR"; // Copyright (c) Microsoft Corporation. -const logger = logger$1.createClientLogger("core-http"); - -// Copyright (c) Microsoft Corporation. -function getCachedAgent(isHttps, agentCache) { - return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; -} -class ReportTransform extends stream.Transform { - constructor(progressCallback) { - super(); - this.progressCallback = progressCallback; - this.loadedBytes = 0; - } - _transform(chunk, _encoding, callback) { - this.push(chunk); - this.loadedBytes += chunk.length; - this.progressCallback({ loadedBytes: this.loadedBytes }); - callback(undefined); - } -} +// Licensed under the MIT license. +const DEFAULT_TLS_SETTINGS = {}; function isReadableStream(body) { return body && typeof body.pipe === "function"; } -function isStreamComplete(stream, aborter) { +function isStreamComplete(stream) { return new Promise((resolve) => { - stream.once("close", () => { - aborter === null || aborter === void 0 ? void 0 : aborter.abort(); - resolve(); - }); - stream.once("end", resolve); - stream.once("error", resolve); + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); }); } -/** - * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} - */ -function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { - httpHeaders.set(key, value); - }); - return httpHeaders; +function isArrayBuffer(body) { + return body && typeof body.byteLength === "number"; +} +class ReportTransform extends stream.Transform { + // eslint-disable-next-line @typescript-eslint/ban-types + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + try { + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(); + } + catch (e) { + callback(e); + } + } + constructor(progressCallback) { + super(); + this.loadedBytes = 0; + this.progressCallback = progressCallback; + } } /** - * An HTTP client that uses `node-fetch`. + * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. 
+ * @internal */ -class NodeFetchHttpClient { +class NodeHttpClient { constructor() { - // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent - this.proxyAgentMap = new Map(); - this.keepAliveAgents = {}; + this.cachedHttpsAgents = new WeakMap(); } /** - * Provides minimum viable error handling and the logic that executes the abstract methods. - * @param httpRequest - Object representing the outgoing HTTP request. - * @returns An object representing the incoming HTTP response. + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. */ - async sendRequest(httpRequest) { - var _a; - if (!httpRequest && typeof httpRequest !== "object") { - throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); - } - const abortController$1 = new abortController.AbortController(); + async sendRequest(request) { + var _a, _b, _c; + const abortController$1 = new AbortController(); let abortListener; - if (httpRequest.abortSignal) { - if (httpRequest.abortSignal.aborted) { + if (request.abortSignal) { + if (request.abortSignal.aborted) { throw new abortController.AbortError("The operation was aborted."); } abortListener = (event) => { @@ -48256,136 +47690,93 @@ class NodeFetchHttpClient { abortController$1.abort(); } }; - httpRequest.abortSignal.addEventListener("abort", abortListener); + request.abortSignal.addEventListener("abort", abortListener); } - if (httpRequest.timeout) { + if (request.timeout > 0) { setTimeout(() => { abortController$1.abort(); - }, httpRequest.timeout); + }, request.timeout); } - if (httpRequest.formData) { - const formData = httpRequest.formData; - const requestForm = new FormData__default["default"](); - const appendFormValue = (key, value) => { - // value function probably returns a stream so we can provide a fresh stream on each retry - if (typeof value === "function") { - value = value(); - } - if (value && - Object.prototype.hasOwnProperty.call(value, "value") && - Object.prototype.hasOwnProperty.call(value, "options")) { - requestForm.append(key, value.value, value.options); - } - else { - requestForm.append(key, value); - } - }; - for (const formKey of Object.keys(formData)) { - const formValue = formData[formKey]; - if (Array.isArray(formValue)) { - for (let j = 0; j < formValue.length; j++) { - appendFormValue(formKey, formValue[j]); - } - } - else { - appendFormValue(formKey, formValue); - } + const acceptEncoding = request.headers.get("Accept-Encoding"); + const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); + let body = typeof request.body === "function" ? 
request.body() : request.body; + if (body && !request.headers.has("Content-Length")) { + const bodyLength = getBodyLength(body); + if (bodyLength !== null) { + request.headers.set("Content-Length", bodyLength); } - httpRequest.body = requestForm; - httpRequest.formData = undefined; - const contentType = httpRequest.headers.get("Content-Type"); - if (contentType && contentType.indexOf("multipart/form-data") !== -1) { - if (typeof requestForm.getBoundary === "function") { - httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); + } + let responseStream; + try { + if (body && request.onUploadProgress) { + const onUploadProgress = request.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + uploadReportStream.on("error", (e) => { + logger.error("Error in upload progress", e); + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); } else { - // browser will automatically apply a suitable content-type header - httpRequest.headers.remove("Content-Type"); + uploadReportStream.end(body); } + body = uploadReportStream; } - } - let body = httpRequest.body - ? typeof httpRequest.body === "function" - ? httpRequest.body() - : httpRequest.body - : undefined; - if (httpRequest.onUploadProgress && httpRequest.body) { - const onUploadProgress = httpRequest.onUploadProgress; - const uploadReportStream = new ReportTransform(onUploadProgress); - if (isReadableStream(body)) { - body.pipe(uploadReportStream); - } - else { - uploadReportStream.end(body); - } - body = uploadReportStream; - } - const platformSpecificRequestInit = await this.prepareRequest(httpRequest); - const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, - // the types for RequestInit are from the browser, which expects AbortSignal to - // have `reason` and `throwIfAborted`, but these don't exist on our polyfill - // for Node. - signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); - let operationResponse; - try { - const response = await this.fetch(httpRequest.url, requestInit); - const headers = parseHeaders(response.headers); - const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || - httpRequest.streamResponseBody; - operationResponse = { - headers: headers, - request: httpRequest, - status: response.status, - readableStreamBody: streaming - ? response.body - : undefined, - bodyAsText: !streaming ? await response.text() : undefined, + const res = await this.makeRequest(request, abortController$1, body); + const headers = getResponseHeaders(res); + const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0; + const response = { + status, + headers, + request, }; - const onDownloadProgress = httpRequest.onDownloadProgress; + // Responses to HEAD must not have a body. + // If they do return a body, that body must be ignored. + if (request.method === "HEAD") { + // call resume() and not destroy() to avoid closing the socket + // and losing keep alive + res.resume(); + return response; + } + responseStream = shouldDecompress ? 
getDecodedResponseStream(res, headers) : res; + const onDownloadProgress = request.onDownloadProgress; if (onDownloadProgress) { - const responseBody = response.body || undefined; - if (isReadableStream(responseBody)) { - const downloadReportStream = new ReportTransform(onDownloadProgress); - responseBody.pipe(downloadReportStream); - operationResponse.readableStreamBody = downloadReportStream; - } - else { - const length = parseInt(headers.get("Content-Length")) || undefined; - if (length) { - // Calling callback for non-stream response for consistency with browser - onDownloadProgress({ loadedBytes: length }); - } - } + const downloadReportStream = new ReportTransform(onDownloadProgress); + downloadReportStream.on("error", (e) => { + logger.error("Error in download progress", e); + }); + responseStream.pipe(downloadReportStream); + responseStream = downloadReportStream; } - await this.processRequest(operationResponse); - return operationResponse; - } - catch (error) { - const fetchError = error; - if (fetchError.code === "ENOTFOUND") { - throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) || + ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) { + response.readableStreamBody = responseStream; } - else if (fetchError.type === "aborted") { - throw new abortController.AbortError("The operation was aborted."); + else { + response.bodyAsText = await streamToText(responseStream); } - throw fetchError; + return response; } finally { // clean up event listener - if (httpRequest.abortSignal && abortListener) { + if (request.abortSignal && abortListener) { let uploadStreamDone = Promise.resolve(); if (isReadableStream(body)) { uploadStreamDone = isStreamComplete(body); } let downloadStreamDone = Promise.resolve(); - if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { - downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1); + if (isReadableStream(responseStream)) { + downloadStreamDone = isStreamComplete(responseStream); } Promise.all([uploadStreamDone, downloadStreamDone]) .then(() => { var _a; - (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); - return; + // eslint-disable-next-line promise/always-return + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + } }) .catch((e) => { logger.warning("Error when cleaning up abortListener on httpRequest", e); @@ -48393,1006 +47784,290 @@ class NodeFetchHttpClient { } } } - getOrCreateAgent(httpRequest) { + makeRequest(request, abortController$1, body) { var _a; - const isHttps = isUrlHttps(httpRequest.url); - // At the moment, proxy settings and keepAlive are mutually - // exclusive because the 'tunnel' library currently lacks the - // ability to create a proxy with keepAlive turned on. - if (httpRequest.proxySettings) { - const { host, port, username, password } = httpRequest.proxySettings; - const key = `${host}:${port}:${username}:${password}`; - const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? 
_a : {}; - let agent = getCachedAgent(isHttps, proxyAgents); - if (agent) { - return agent; - } - const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); - agent = tunnel.agent; - if (tunnel.isHttps) { - proxyAgents.httpsAgent = tunnel.agent; - } - else { - proxyAgents.httpAgent = tunnel.agent; - } - this.proxyAgentMap.set(key, proxyAgents); - return agent; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); } - else if (httpRequest.keepAlive) { - let agent = getCachedAgent(isHttps, this.keepAliveAgents); - if (agent) { - return agent; + const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure); + const options = { + agent, + hostname: url.hostname, + path: `${url.pathname}${url.search}`, + port: url.port, + method: request.method, + headers: request.headers.toJSON({ preserveCase: true }), + }; + return new Promise((resolve, reject) => { + const req = isInsecure ? http__namespace.request(options, resolve) : https__namespace.request(options, resolve); + req.once("error", (err) => { + var _a; + reject(new RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? _a : RestError.REQUEST_SEND_ERROR, request })); + }); + abortController$1.signal.addEventListener("abort", () => { + const abortError = new abortController.AbortError("The operation was aborted."); + req.destroy(abortError); + reject(abortError); + }); + if (body && isReadableStream(body)) { + body.pipe(req); } - const agentOptions = { - keepAlive: httpRequest.keepAlive, - }; - if (isHttps) { - agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); + else if (body) { + if (typeof body === "string" || Buffer.isBuffer(body)) { + req.end(body); + } + else if (isArrayBuffer(body)) { + req.end(ArrayBuffer.isView(body) ? Buffer.from(body.buffer) : Buffer.from(body)); + } + else { + logger.error("Unrecognized body type", body); + reject(new RestError("Unrecognized body type")); + } } else { - agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); + // streams don't like "undefined" being passed as data + req.end(); } - return agent; - } - else { - return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; - } - } - /** - * Uses `node-fetch` to perform the request. - */ - // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs - async fetch(input, init) { - return node_fetch__default["default"](input, init); - } - /** - * Prepares a request based on the provided web resource. - */ - async prepareRequest(httpRequest) { - const requestInit = {}; - // Set the http(s) agent - requestInit.agent = this.getOrCreateAgent(httpRequest); - requestInit.compress = httpRequest.decompressResponse; - return requestInit; - } - /** - * Process an HTTP response. - */ - async processRequest(_operationResponse) { - /* no_op */ - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The different levels of logs that can be used with the HttpPipelineLogger. - */ -exports.HttpPipelineLogLevel = void 0; -(function (HttpPipelineLogLevel) { - /** - * A log level that indicates that no logs will be logged. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; - /** - * An error log. 
- */ - HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; - /** - * A warning log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; - /** - * An information log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; -})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); - -// Copyright (c) Microsoft Corporation. -/** - * Converts an OperationOptions to a RequestOptionsBase - * - * @param opts - OperationOptions object to convert to RequestOptionsBase - */ -function operationOptionsToRequestOptionsBase(opts) { - const { requestOptions, tracingOptions } = opts, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); - let result = additionalOptions; - if (requestOptions) { - result = Object.assign(Object.assign({}, result), requestOptions); - } - if (tracingOptions) { - result.tracingContext = tracingOptions.tracingContext; - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. - result.spanOptions = tracingOptions === null || tracingOptions === void 0 ? void 0 : tracingOptions.spanOptions; - } - return result; -} - -// Copyright (c) Microsoft Corporation. -/** - * The base class from which all request policies derive. - */ -class BaseRequestPolicy { - /** - * The main method to implement that manipulates a request/response. - */ - constructor( - /** - * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. - */ - _nextPolicy, - /** - * The options that can be passed to a given request policy. - */ - _options) { - this._nextPolicy = _nextPolicy; - this._options = _options; - } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. - */ - shouldLog(logLevel) { - return this._options.shouldLog(logLevel); - } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meat the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - this._options.log(logLevel, message); - } -} -/** - * Optional properties that can be used when creating a RequestPolicy. - */ -class RequestPolicyOptions { - constructor(_logger) { - this._logger = _logger; - } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. - */ - shouldLog(logLevel) { - return (!!this._logger && - logLevel !== exports.HttpPipelineLogLevel.OFF && - logLevel <= this._logger.minimumLogLevel); - } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meet the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - if (this._logger && this.shouldLog(logLevel)) { - this._logger.log(logLevel, message); - } + }); } -} - -// Copyright (c) Microsoft Corporation. 
-// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed -// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 -// By creating a new copy of the settings each time we instantiate the parser, -// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. -const xml2jsDefaultOptionsV2 = { - explicitCharkey: false, - trim: false, - normalize: false, - normalizeTags: false, - attrkey: XML_ATTRKEY, - explicitArray: true, - ignoreAttrs: false, - mergeAttrs: false, - explicitRoot: true, - validator: undefined, - xmlns: false, - explicitChildren: false, - preserveChildrenOrder: false, - childkey: "$$", - charsAsChildren: false, - includeWhiteChars: false, - async: false, - strict: true, - attrNameProcessors: undefined, - attrValueProcessors: undefined, - tagNameProcessors: undefined, - valueProcessors: undefined, - rootName: "root", - xmldec: { - version: "1.0", - encoding: "UTF-8", - standalone: true, - }, - doctype: undefined, - renderOpts: { - pretty: true, - indent: " ", - newline: "\n", - }, - headless: false, - chunkSize: 10000, - emptyTag: "", - cdata: false, -}; -// The xml2js settings for general XML parsing operations. -const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); -xml2jsParserSettings.explicitArray = false; -// The xml2js settings for general XML building operations. -const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); -xml2jsBuilderSettings.explicitArray = false; -xml2jsBuilderSettings.renderOpts = { - pretty: false, -}; -/** - * Converts given JSON object to XML string - * @param obj - JSON object to be converted into XML string - * @param opts - Options that govern the parsing of given JSON object - */ -function stringifyXML(obj, opts = {}) { - var _a; - xml2jsBuilderSettings.rootName = opts.rootName; - xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings); - return builder.buildObject(obj); -} -/** - * Converts given XML string into JSON - * @param str - String containing the XML content to be parsed into JSON - * @param opts - Options that govern the parsing of given xml string - */ -function parseXML(str, opts = {}) { - var _a; - xml2jsParserSettings.explicitRoot = !!opts.includeRoot; - xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings); - return new Promise((resolve, reject) => { - if (!str) { - reject(new Error("Document is empty")); + getOrCreateAgent(request, isInsecure) { + var _a; + const disableKeepAlive = request.disableKeepAlive; + // Handle Insecure requests first + if (isInsecure) { + if (disableKeepAlive) { + // keepAlive:false is the default so we don't need a custom Agent + return http__namespace.globalAgent; + } + if (!this.cachedHttpAgent) { + // If there is no cached agent create a new one and cache it. 
+ this.cachedHttpAgent = new http__namespace.Agent({ keepAlive: true }); + } + return this.cachedHttpAgent; } else { - xmlParser.parseString(str, (err, res) => { - if (err) { - reject(err); - } - else { - resolve(res); - } - }); + if (disableKeepAlive && !request.tlsSettings) { + // When there are no tlsSettings and keepAlive is false + // we don't need a custom agent + return https__namespace.globalAgent; + } + // We use the tlsSettings to index cached clients + const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS; + // Get the cached agent or create a new one with the + // provided values for keepAlive and tlsSettings + let agent = this.cachedHttpsAgents.get(tlsSettings); + if (agent && agent.options.keepAlive === !disableKeepAlive) { + return agent; + } + logger.info("No cached TLS Agent exist, creating a new Agent"); + agent = new https__namespace.Agent(Object.assign({ + // keepAlive is true if disableKeepAlive is false. + keepAlive: !disableKeepAlive }, tlsSettings)); + this.cachedHttpsAgents.set(tlsSettings, agent); + return agent; } - }); -} - -// Copyright (c) Microsoft Corporation. -/** - * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they - * pass through the HTTP pipeline. - */ -function deserializationPolicy(deserializationContentTypes, parsingOptions) { - return { - create: (nextPolicy, options) => { - return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); - }, - }; -} -const defaultJsonContentTypes = ["application/json", "text/json"]; -const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; -const DefaultDeserializationOptions = { - expectedContentTypes: { - json: defaultJsonContentTypes, - xml: defaultXmlContentTypes, - }, -}; -/** - * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the - * HTTP pipeline. - */ -class DeserializationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { - var _a; - super(nextPolicy, requestPolicyOptions); - this.jsonContentTypes = - (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; - this.xmlContentTypes = - (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; - this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - } - async sendRequest(request) { - return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey, - })); } } -function getOperationResponse(parsedResponse) { - let result; - const request = parsedResponse.request; - const operationSpec = request.operationSpec; - if (operationSpec) { - const operationResponseGetter = request.operationResponseGetter; - if (!operationResponseGetter) { - result = operationSpec.responses[parsedResponse.status]; +function getResponseHeaders(res) { + const headers = createHttpHeaders(); + for (const header of Object.keys(res.headers)) { + const value = res.headers[header]; + if (Array.isArray(value)) { + if (value.length > 0) { + headers.set(header, value[0]); + } } - else { - result = operationResponseGetter(operationSpec, parsedResponse); + else if (value) { + headers.set(header, value); } } - return result; + return headers; } -function shouldDeserializeResponse(parsedResponse) { - const shouldDeserialize = parsedResponse.request.shouldDeserialize; - let result; - if (shouldDeserialize === undefined) { - result = true; - } - else if (typeof shouldDeserialize === "boolean") { - result = shouldDeserialize; +function getDecodedResponseStream(stream, headers) { + const contentEncoding = headers.get("Content-Encoding"); + if (contentEncoding === "gzip") { + const unzip = zlib__namespace.createGunzip(); + stream.pipe(unzip); + return unzip; } - else { - result = shouldDeserialize(parsedResponse); + else if (contentEncoding === "deflate") { + const inflate = zlib__namespace.createInflate(); + stream.pipe(inflate); + return inflate; } - return result; + return stream; } -/** - * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. - * @param jsonContentTypes - Response content types to parse the body as JSON. - * @param xmlContentTypes - Response content types to parse the body as XML. - * @param response - HTTP Response from the pipeline. - * @param options - Options to the serializer, mostly for configuring the XML parser if needed. - * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. - */ -function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { - if (!shouldDeserializeResponse(parsedResponse)) { - return parsedResponse; - } - const operationSpec = parsedResponse.request.operationSpec; - if (!operationSpec || !operationSpec.responses) { - return parsedResponse; - } - const responseSpec = getOperationResponse(parsedResponse); - const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); - if (error) { - throw error; - } - else if (shouldReturnResponse) { - return parsedResponse; - } - // An operation response spec does exist for current status code, so - // use it to deserialize the response. 
- if (responseSpec) { - if (responseSpec.bodyMapper) { - let valueToDeserialize = parsedResponse.parsedBody; - if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { - valueToDeserialize = - typeof valueToDeserialize === "object" - ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName] - : []; - } - try { - parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); - } - catch (innerError) { - const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); - throw restError; - } +function streamToText(stream) { + return new Promise((resolve, reject) => { + const buffer = []; + stream.on("data", (chunk) => { + if (Buffer.isBuffer(chunk)) { + buffer.push(chunk); + } + else { + buffer.push(Buffer.from(chunk)); } - else if (operationSpec.httpMethod === "HEAD") { - // head methods never have a body, but we return a boolean to indicate presence/absence of the resource - parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + }); + stream.on("end", () => { + resolve(Buffer.concat(buffer).toString("utf8")); + }); + stream.on("error", (e) => { + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + reject(e); } - if (responseSpec.headersMapper) { - parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders", options); + else { + reject(new RestError(`Error reading response as text: ${e.message}`, { + code: RestError.PARSE_ERROR, + })); } - } - return parsedResponse; + }); }); } -function isOperationSpecEmpty(operationSpec) { - const expectedStatusCodes = Object.keys(operationSpec.responses); - return (expectedStatusCodes.length === 0 || - (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); -} -function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { - var _a; - const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; - const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) - ? isSuccessByStatus - : !!responseSpec; - if (isExpectedStatusCode) { - if (responseSpec) { - if (!responseSpec.isError) { - return { error: null, shouldReturnResponse: false }; - } - } - else { - return { error: null, shouldReturnResponse: false }; - } - } - const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; - const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) || - parsedResponse.request.streamResponseBody; - const initialErrorMessage = streaming - ? 
`Unexpected status code: ${parsedResponse.status}` - : parsedResponse.bodyAsText; - const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); - // If the item failed but there's no error spec or default spec to deserialize the error, - // we should fail so we just throw the parsed response - if (!errorResponseSpec) { - throw error; +/** @internal */ +function getBodyLength(body) { + if (!body) { + return 0; } - const defaultBodyMapper = errorResponseSpec.bodyMapper; - const defaultHeadersMapper = errorResponseSpec.headersMapper; - try { - // If error response has a body, try to deserialize it using default body mapper. - // Then try to extract error code & message from it - if (parsedResponse.parsedBody) { - const parsedBody = parsedResponse.parsedBody; - let parsedError; - if (defaultBodyMapper) { - let valueToDeserialize = parsedBody; - if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { - valueToDeserialize = - typeof parsedBody === "object" ? parsedBody[defaultBodyMapper.xmlElementName] : []; - } - parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); - } - const internalError = parsedBody.error || parsedError || parsedBody; - error.code = internalError.code; - if (internalError.message) { - error.message = internalError.message; - } - if (defaultBodyMapper) { - error.response.parsedBody = parsedError; - } - } - // If error response has headers, try to deserialize it using default header mapper - if (parsedResponse.headers && defaultHeadersMapper) { - error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders"); - } + else if (Buffer.isBuffer(body)) { + return body.length; } - catch (defaultError) { - error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + else if (isReadableStream(body)) { + return null; } - return { error, shouldReturnResponse: false }; -} -function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { - var _a; - const errorHandler = (err) => { - const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; - const errCode = err.code || RestError.PARSE_ERROR; - const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); - return Promise.reject(e); - }; - const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || - operationResponse.request.streamResponseBody; - if (!streaming && operationResponse.bodyAsText) { - const text = operationResponse.bodyAsText; - const contentType = operationResponse.headers.get("Content-Type") || ""; - const contentComponents = !contentType - ? 
[] - : contentType.split(";").map((component) => component.toLowerCase()); - if (contentComponents.length === 0 || - contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { - return new Promise((resolve) => { - operationResponse.parsedBody = JSON.parse(text); - resolve(operationResponse); - }).catch(errorHandler); - } - else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { - return parseXML(text, opts) - .then((body) => { - operationResponse.parsedBody = body; - return operationResponse; - }) - .catch(errorHandler); - } + else if (isArrayBuffer(body)) { + return body.byteLength; } - return Promise.resolve(operationResponse); -} - -// Copyright (c) Microsoft Corporation. -/** - * By default, HTTP connections are maintained for future requests. - */ -const DefaultKeepAliveOptions = { - enable: true, -}; -/** - * Creates a policy that controls whether HTTP connections are maintained on future requests. - * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. - * @returns An instance of the {@link KeepAlivePolicy} - */ -function keepAlivePolicy(keepAliveOptions) { - return { - create: (nextPolicy, options) => { - return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); - }, - }; -} -/** - * KeepAlivePolicy is a policy used to control keep alive settings for every request. - */ -class KeepAlivePolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - * - * @param nextPolicy - - * @param options - - * @param keepAliveOptions - - */ - constructor(nextPolicy, options, keepAliveOptions) { - super(nextPolicy, options); - this.keepAliveOptions = keepAliveOptions; + else if (typeof body === "string") { + return Buffer.from(body).length; } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.keepAlive = this.keepAliveOptions.enable; - return this._nextPolicy.sendRequest(request); + else { + return null; } } - -// Copyright (c) Microsoft Corporation. -/** - * Methods that are allowed to follow redirects 301 and 302 - */ -const allowedRedirect = ["GET", "HEAD"]; -const DefaultRedirectOptions = { - handleRedirects: true, - maxRetries: 20, -}; -/** - * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. - * @param maximumRetries - Maximum number of redirects to follow. - * @returns An instance of the {@link RedirectPolicy} - */ -function redirectPolicy(maximumRetries = 20) { - return { - create: (nextPolicy, options) => { - return new RedirectPolicy(nextPolicy, options, maximumRetries); - }, - }; -} /** - * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * Create a new HttpClient instance for the NodeJS environment. 
+ * @internal */ -class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { - super(nextPolicy, options); - this.maxRetries = maxRetries; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} -function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); - if (locationHeader && - (status === 300 || - (status === 301 && allowedRedirect.includes(request.method)) || - (status === 302 && allowedRedirect.includes(request.method)) || - (status === 303 && request.method === "POST") || - status === 307) && - (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); - builder.setPath(locationHeader); - request.url = builder.toString(); - // POST request with Status code 303 should be converted into a - // redirected GET request if the redirect url is present in the location header - if (status === 303) { - request.method = "GET"; - delete request.body; - } - return policy._nextPolicy - .sendRequest(request) - .then((res) => handleRedirect(policy, res, currentRetries + 1)); - } - return Promise.resolve(response); +function createNodeHttpClient() { + return new NodeHttpClient(); } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const DEFAULT_CLIENT_RETRY_COUNT = 3; -// intervals are in ms -const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; -const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; -const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; -function isNumber(n) { - return typeof n === "number"; -} -/** - * @internal - * Determines if the operation should be retried. - * - * @param retryLimit - Specifies the max number of retries. - * @param predicate - Initial chekck on whether to retry based on given responses or errors - * @param retryData - The retry data. - * @returns True if the operation qualifies for a retry; false otherwise. - */ -function shouldRetry(retryLimit, predicate, retryData, response, error) { - if (!predicate(response, error)) { - return false; - } - return retryData.retryCount < retryLimit; -} /** - * @internal - * Updates the retry data for the next attempt. - * - * @param retryOptions - specifies retry interval, and its lower bound and upper bound. - * @param retryData - The retry data. - * @param err - The operation"s error, if any. + * Create the correct HttpClient for the current environment. */ -function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { - if (err) { - if (retryData.error) { - err.innerError = retryData.error; - } - retryData.error = err; - } - // Adjust retry count - retryData.retryCount++; - // Adjust retry interval - let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; - const boundedRandDelta = retryOptions.retryInterval * 0.8 + - Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); - incrementDelta *= boundedRandDelta; - retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); - return retryData; +function createDefaultHttpClient() { + return createNodeHttpClient(); } // Copyright (c) Microsoft Corporation. -/** - * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. 
- * @param retryCount - Maximum number of retries. - * @param retryInterval - Base time between retries. - * @param maxRetryInterval - Maximum time to wait between retries. - */ -function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { - return { - create: (nextPolicy, options) => { - return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); - }, - }; +// Licensed under the MIT license. +class PipelineRequestImpl { + constructor(options) { + var _a, _b, _c, _d, _e, _f, _g; + this.url = options.url; + this.body = options.body; + this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : createHttpHeaders(); + this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET"; + this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : 0; + this.multipartBody = options.multipartBody; + this.formData = options.formData; + this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false; + this.proxySettings = options.proxySettings; + this.streamResponseStatusCodes = options.streamResponseStatusCodes; + this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false; + this.abortSignal = options.abortSignal; + this.tracingOptions = options.tracingOptions; + this.onUploadProgress = options.onUploadProgress; + this.onDownloadProgress = options.onDownloadProgress; + this.requestId = options.requestId || coreUtil.randomUUID(); + this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? _f : false; + this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; + } } /** - * Describes the Retry Mode type. Currently supporting only Exponential. - */ -exports.RetryMode = void 0; -(function (RetryMode) { - /** - * Currently supported retry mode. - * Each time a retry happens, it will take exponentially more time than the last time. - */ - RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; -})(exports.RetryMode || (exports.RetryMode = {})); -const DefaultRetryOptions = { - maxRetries: DEFAULT_CLIENT_RETRY_COUNT, - retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, - maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, -}; -/** - * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * Creates a new pipeline request with the given options. + * This method is to allow for the easy setting of default values and not required. + * @param options - The options to create the request with. */ -class ExponentialRetryPolicy extends BaseRequestPolicy { - /** - * @param nextPolicy - The next RequestPolicy in the pipeline chain. - * @param options - The options for this RequestPolicy. - * @param retryCount - The client retry count. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - */ - constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) - ? 
maxRetryInterval - : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .then((response) => retry$1(this, request, response)) - .catch((error) => retry$1(this, request, error.response, undefined, error)); - } -} -async function retry$1(policy, request, response, retryData, requestError) { - function shouldPolicyRetry(responseParam) { - const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; - if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { - return false; - } - if (statusCode === undefined || - (statusCode < 500 && statusCode !== 408) || - statusCode === 501 || - statusCode === 505) { - return false; - } - return true; - } - retryData = updateRetryData({ - retryInterval: policy.retryInterval, - minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval, - }, retryData, requestError); - const isAborted = request.abortSignal && request.abortSignal.aborted; - if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { - logger.info(`Retrying request in ${retryData.retryInterval}`); - try { - await coreUtil.delay(retryData.retryInterval); - const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry$1(policy, request, res, retryData); - } - catch (err) { - return retry$1(policy, request, response, retryData, err); - } - } - else if (isAborted || requestError || !response) { - // If the operation failed in the end, return all errors instead of just the last one - const err = retryData.error || - new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); - throw err; - } - else { - return response; - } +function createPipelineRequest(options) { + return new PipelineRequestImpl(options); } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Creates a policy that logs information about the outgoing request and the incoming responses. - * @param loggingOptions - Logging options. - * @returns An instance of the {@link LogPolicy} + * The programmatic identifier of the exponentialRetryPolicy. */ -function logPolicy(loggingOptions = {}) { - return { - create: (nextPolicy, options) => { - return new LogPolicy(nextPolicy, options, loggingOptions); - }, - }; -} +const exponentialRetryPolicyName = "exponentialRetryPolicy"; /** - * A policy that logs information about the outgoing request and the incoming responses. + * A policy that attempts to retry requests while introducing an exponentially increasing delay. + * @param options - Options that configure retry logic. */ -class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { - super(nextPolicy, options); - this.logger = logger$1; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. 
- */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; - } - sendRequest(request) { - if (!this.logger.enabled) - return this._nextPolicy.sendRequest(request); - this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); - return response; - } +function exponentialRetryPolicy(options = {}) { + var _a; + return retryPolicy([ + exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }); } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Get the path to this parameter's value as a dotted string (a.b.c). - * @param parameter - The parameter to get the path string for. - * @returns The path to this parameter's value as a dotted string. + * Name of the {@link systemErrorRetryPolicy} */ -function getPathStringFromParameter(parameter) { - return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); -} -function getPathStringFromParameterPath(parameterPath, mapper) { - let result; - if (typeof parameterPath === "string") { - result = parameterPath; - } - else if (Array.isArray(parameterPath)) { - result = parameterPath.join("."); - } - else { - result = mapper.serializedName; - } - return result; -} - -// Copyright (c) Microsoft Corporation. +const systemErrorRetryPolicyName = "systemErrorRetryPolicy"; /** - * Gets the list of status codes for streaming responses. - * @internal + * A retry policy that specifically seeks to handle errors in the + * underlying transport layer (e.g. DNS lookup failures) rather than + * retryable error codes from the server itself. + * @param options - Options that customize the policy. */ -function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; - if (operationResponse.bodyMapper && - operationResponse.bodyMapper.type.name === MapperType.Stream) { - result.add(Number(statusCode)); - } - } - return result; -} - -// Copyright (c) Microsoft Corporation. 
-function getDefaultUserAgentKey() { - return Constants.HeaderConstants.USER_AGENT; -} -function getPlatformSpecificData() { - const runtimeInfo = { - key: "Node", - value: process.version, - }; - const osInfo = { - key: "OS", - value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, +function systemErrorRetryPolicy(options = {}) { + var _a; + return { + name: systemErrorRetryPolicyName, + sendRequest: retryPolicy([ + exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, }; - return [runtimeInfo, osInfo]; } // Copyright (c) Microsoft Corporation. -function getRuntimeInfo() { - const msRestRuntime = { - key: "core-http", - value: Constants.coreHttpVersion, - }; - return [msRestRuntime]; -} -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { - return telemetryInfo - .map((info) => { - const value = info.value ? `${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; - }) - .join(keySeparator); -} -const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +// Licensed under the MIT license. /** - * The default approach to generate user agents. - * Uses static information from this package, plus system information available from the runtime. + * Name of the {@link throttlingRetryPolicy} */ -function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); - return userAgent; -} +const throttlingRetryPolicyName = "throttlingRetryPolicy"; /** - * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. - * @param userAgentData - Telemetry information. - * @returns A new {@link UserAgentPolicy}. + * A policy that retries when the server sends a 429 response with a Retry-After header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * + * @param options - Options that configure retry logic. */ -function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null - ? getDefaultUserAgentKey() - : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null - ? getDefaultUserAgentValue() - : userAgentData.value; +function throttlingRetryPolicy(options = {}) { + var _a; return { - create: (nextPolicy, options) => { - return new UserAgentPolicy(nextPolicy, options, key, value); - }, + name: throttlingRetryPolicyName, + sendRequest: retryPolicy([throttlingRetryStrategy()], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, }; } -/** - * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. 
- */ -class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; - } - sendRequest(request) { - this.addUserAgentHeader(request); - return this._nextPolicy.sendRequest(request); - } - /** - * Adds the user agent header to the outgoing request. - */ - addUserAgentHeader(request) { - if (!request.headers) { - request.headers = new HttpHeaders(); - } - if (!request.headers.get(this.headerKey) && this.headerValue) { - request.headers.set(this.headerKey, this.headerValue); - } - } -} // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -/** - * The format that will be used to join an array of values together for a query parameter value. - */ -exports.QueryCollectionFormat = void 0; -(function (QueryCollectionFormat) { - /** - * CSV: Each pair of segments joined by a single comma. - */ - QueryCollectionFormat["Csv"] = ","; - /** - * SSV: Each pair of segments joined by a single space character. - */ - QueryCollectionFormat["Ssv"] = " "; - /** - * TSV: Each pair of segments joined by a single tab character. - */ - QueryCollectionFormat["Tsv"] = "\t"; - /** - * Pipes: Each pair of segments joined by a single pipe character. - */ - QueryCollectionFormat["Pipes"] = "|"; - /** - * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` - */ - QueryCollectionFormat["Multi"] = "Multi"; -})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); - -// Copyright (c) Microsoft Corporation. // Default options for the cycler if none are provided const DEFAULT_CYCLER_OPTIONS = { forcedRefreshWindowInMs: 1000, @@ -49404,19 +48079,16 @@ const DEFAULT_CYCLER_OPTIONS = { * into an AccessTokenGetter by retrying the unreliable getter in a regular * interval. * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token + * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. + * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. + * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. + * @returns - A promise that, if it resolves, will resolve with an access token. */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { +async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { // This wrapper handles exceptions gracefully as long as we haven't exceeded // the timeout. 
async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { + if (Date.now() < refreshTimeout) { try { return await getAccessToken(); } @@ -49435,7 +48107,7 @@ async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { } let token = await tryGetAccessToken(); while (token === null) { - await coreUtil.delay(retryIntervalInMs); + await delay(retryIntervalInMs); token = await tryGetAccessToken(); } return token; @@ -49450,14 +48122,14 @@ async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { * * @param credential - the underlying TokenCredential that provides the access * token - * @param scopes - the scopes to request authorization for * @param tokenCyclerOptions - optionally override default settings for the cycler * * @returns - a function that reliably produces a valid access token */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { +function createTokenCycler(credential, tokenCyclerOptions) { let refreshWorker = null; let token = null; + let tenantId; const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); /** * This little holder defines several predicates that we use to construct @@ -49491,7 +48163,7 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { * Starts a refresh job or returns the existing job if one is already * running. */ - function refresh(getTokenOptions) { + function refresh(scopes, getTokenOptions) { var _a; if (!cycler.isRefreshing) { // We bind `scopes` here to avoid passing it around a lot @@ -49504,6 +48176,7 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { .then((_token) => { refreshWorker = null; token = _token; + tenantId = getTokenOptions.tenantId; return token; }) .catch((reason) => { @@ -49512,12 +48185,13 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { // new retry chain. refreshWorker = null; token = null; + tenantId = undefined; throw reason; }); } return refreshWorker; } - return async (tokenOptions) => { + return async (scopes, tokenOptions) => { // // Simple rules: // - If we MUST refresh, then return the refresh task, blocking @@ -49527,1680 +48201,497 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { // - Return the token, since it's fine if we didn't return in // step 1. // - if (cycler.mustRefresh) - return refresh(tokenOptions); + // If the tenantId passed in token options is different to the one we have + // Or if we are in claim challenge and the token was rejected and a new access token need to be issued, we need to + // refresh the token with the new tenantId or token. + const mustRefresh = tenantId !== tokenOptions.tenantId || Boolean(tokenOptions.claims) || cycler.mustRefresh; + if (mustRefresh) + return refresh(scopes, tokenOptions); if (cycler.shouldRefresh) { - refresh(tokenOptions); + refresh(scopes, tokenOptions); } return token; }; } -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. 
- */ -function bearerTokenAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } - return { - create: (nextPolicy, options) => { - return new BearerTokenAuthenticationPolicy(nextPolicy, options); - }, - }; -} // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Returns a request policy factory that can be used to create an instance of - * {@link DisableResponseDecompressionPolicy}. + * The programmatic identifier of the bearerTokenAuthenticationPolicy. */ -function disableResponseDecompressionPolicy() { - return { - create: (nextPolicy, options) => { - return new DisableResponseDecompressionPolicy(nextPolicy, options); - }, +const bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; +/** + * Default authorize request handler + */ +async function defaultAuthorizeRequest(options) { + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, }; + const accessToken = await getAccessToken(scopes, getTokenOptions); + if (accessToken) { + options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + } } /** - * A policy to disable response decompression according to Accept-Encoding header - * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. */ -class DisableResponseDecompressionPolicy extends BaseRequestPolicy { - /** - * Creates an instance of DisableResponseDecompressionPolicy. - * - * @param nextPolicy - - * @param options - - */ - // The parent constructor is protected. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; } + return; } - -// Copyright (c) Microsoft Corporation. /** - * Creates a policy that assigns a unique request id to outgoing requests. - * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + * A policy that can request a token from a TokenCredential implementation and + * then apply it to the Authorization header of a request as a Bearer token. 
*/ -function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { +function bearerTokenAuthenticationPolicy(options) { + var _a; + const { credential, scopes, challengeCallbacks } = options; + const logger$1 = options.logger || logger; + const callbacks = Object.assign({ authorizeRequest: (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) !== null && _a !== void 0 ? _a : defaultAuthorizeRequest, authorizeRequestOnChallenge: challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge }, challengeCallbacks); + // This function encapsulates the entire process of reliably retrieving the token + // The options are left out of the public API until there's demand to configure this. + // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions` + // in order to pass through the `options` object. + const getAccessToken = credential + ? createTokenCycler(credential /* , options */) + : () => Promise.resolve(null); return { - create: (nextPolicy, options) => { - return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + name: bearerTokenAuthenticationPolicyName, + /** + * If there's no challenge parameter: + * - It will try to retrieve the token using the cache, or the credential's getToken. + * - Then it will try the next policy with or without the retrieved token. + * + * It uses the challenge parameters to: + * - Skip a first attempt to get the token from the credential if there's no cached token, + * since it expects the token to be retrievable only after the challenge. + * - Prepare the outgoing request if the `prepareRequest` method has been provided. + * - Send an initial request to receive the challenge if it fails. + * - Process a challenge if the response contains it. + * - Retrieve a token with the challenge information, then re-send the request. + */ + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + await callbacks.authorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger: logger$1, + }); + let response; + let error; + try { + response = await next(request); + } + catch (err) { + error = err; + response = err.response; + } + if (callbacks.authorizeRequestOnChallenge && + (response === null || response === void 0 ? void 0 : response.status) === 401 && + getChallenge(response)) { + // processes challenge + const shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + response, + getAccessToken, + logger: logger$1, + }); + if (shouldSendRequest) { + return next(request); + } + } + if (error) { + throw error; + } + else { + return response; + } }, }; } -class GenerateClientRequestIdPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _requestIdHeaderName) { - super(nextPolicy, options); - this._requestIdHeaderName = _requestIdHeaderName; - } - sendRequest(request) { - if (!request.headers.contains(this._requestIdHeaderName)) { - request.headers.set(this._requestIdHeaderName, request.requestId); - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. 
-let cachedHttpClient; -function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = new NodeFetchHttpClient(); - } - return cachedHttpClient; -} // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the ndJsonPolicy. + */ +const ndJsonPolicyName = "ndJsonPolicy"; +/** + * ndJsonPolicy is a policy used to control keep alive settings for every request. + */ function ndJsonPolicy() { return { - create: (nextPolicy, options) => { - return new NdJsonPolicy(nextPolicy, options); + name: ndJsonPolicyName, + async sendRequest(request, next) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return next(request); }, }; } -/** - * NdJsonPolicy that formats a JSON array as newline-delimited JSON - */ -class NdJsonPolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends a request. - */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Stores the patterns specified in NO_PROXY environment variable. - * @internal + * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. */ -const globalBypassedMap = new Map(); -function loadEnvironmentProxyValue() { - if (!process) { - return undefined; - } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; +const auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; +const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; +async function sendAuthorizeRequest(options) { + var _a, _b; + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? _b : ""; } /** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + * A policy for external tokens to `x-ms-authorization-auxiliary` header. + * This header will be used when creating a cross-tenant application we may need to handle authentication requests + * for resources that are in different tenants. 
+ * You could see [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works */ -function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - // This should match either domain it self or any subdomain or host - // .foo.com will match foo.com it self or *.foo.com - if (host.endsWith(pattern)) { - isBypassedFlag = true; +function auxiliaryAuthenticationHeaderPolicy(options) { + const { credentials, scopes } = options; + const logger$1 = options.logger || logger; + const tokenCyclerMap = new WeakMap(); + return { + name: auxiliaryAuthenticationHeaderPolicyName, + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); } - else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; + if (!credentials || credentials.length === 0) { + logger$1.info(`${auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); + return next(request); + } + const tokenPromises = []; + for (const credential of credentials) { + let getAccessToken = tokenCyclerMap.get(credential); + if (!getAccessToken) { + getAccessToken = createTokenCycler(credential); + tokenCyclerMap.set(credential, getAccessToken); } + tokenPromises.push(sendAuthorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger: logger$1, + })); } - } - else { - if (host === pattern) { - isBypassedFlag = true; + const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); + if (auxiliaryTokens.length === 0) { + logger$1.warning(`None of the auxiliary tokens are valid. ${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); + return next(request); } - } - } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); - return isBypassedFlag; -} -/** - * @internal - */ -function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); - } - return []; -} -/** - * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. - * @param proxyUrl - URL of the proxy - * @returns The default proxy settings, or undefined. - */ -function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return undefined; - } - } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; - return { - host: schema + parsedUrl.getHost(), - port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password, - }; -} -/** - * A policy that allows one to apply proxy settings to all requests. 
- * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings, options) { - if (!proxySettings) { - proxySettings = getDefaultProxySettings(); - } - if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); - } - return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); - }, - }; -} -function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); - if (atIndex === -1) { - return { urlWithoutAuth: url }; - } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); - return { - username, - password, - urlWithoutAuth, - }; -} -class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; - } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { - request.proxySettings = this.proxySettings; - } - return this._nextPolicy.sendRequest(request); - } + request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); + return next(request); + }, + }; } // Copyright (c) Microsoft Corporation. -function rpRegistrationPolicy(retryTimeout = 30) { - return { - create: (nextPolicy, options) => { - return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); - }, - }; -} -class RPRegistrationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _retryTimeout = 30) { - super(nextPolicy, options); - this._retryTimeout = _retryTimeout; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .then((response) => registerIfNeeded(this, request, response)); - } -} -function registerIfNeeded(policy, request, response) { - if (response.status === 409) { - const rpName = checkRPNotRegisteredError(response.bodyAsText); - if (rpName) { - const urlPrefix = extractSubscriptionUrl(request.url); - return (registerRP(policy, urlPrefix, rpName, request) - // Autoregistration of ${provider} failed for some reason. We will not return this error - // instead will return the initial response with 409 status code back to the user. - // do nothing here as we are returning the original response at the end of this method. - .catch(() => false) - .then((registrationStatus) => { - if (registrationStatus) { - // Retry the original request. We have to change the x-ms-client-request-id - // otherwise Azure endpoint will return the initial 409 (cached) response. 
- request.headers.set("x-ms-client-request-id", generateUuid()); - return policy._nextPolicy.sendRequest(request.clone()); - } - return response; - })); - } - } - return Promise.resolve(response); -} -/** - * Reuses the headers of the original request and url (if specified). - * @param originalRequest - The original request - * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. - * @returns A new request object with desired headers. - */ -function getRequestEssentials(originalRequest, reuseUrlToo = false) { - const reqOptions = originalRequest.clone(); - if (reuseUrlToo) { - reqOptions.url = originalRequest.url; - } - // We have to change the x-ms-client-request-id otherwise Azure endpoint - // will return the initial 409 (cached) response. - reqOptions.headers.set("x-ms-client-request-id", generateUuid()); - // Set content-type to application/json - reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); - return reqOptions; -} +// Licensed under the MIT license. +const unimplementedMethods = { + arrayBuffer: () => { + throw new Error("Not implemented"); + }, + slice: () => { + throw new Error("Not implemented"); + }, + text: () => { + throw new Error("Not implemented"); + }, +}; /** - * Validates the error code and message associated with 409 response status code. If it matches to that of - * RP not registered then it returns the name of the RP else returns undefined. - * @param body - The response body received after making the original request. - * @returns The name of the RP if condition is satisfied else undefined. + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function to: + * - Create a File object for use in RequestBodyType.formData in environments where the + * global File object is unavailable. + * - Create a File-like object from a readable stream without reading the stream into memory. + * + * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is + * passed in a request's form data map, the stream will not be read into memory + * and instead will be streamed when the request is made. In the event of a retry, the + * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. */ -function checkRPNotRegisteredError(body) { - let result, responseBody; - if (body) { - try { - responseBody = JSON.parse(body); - } - catch (err) { - // do nothing; - } - if (responseBody && - responseBody.error && - responseBody.error.message && - responseBody.error.code && - responseBody.error.code === "MissingSubscriptionRegistration") { - const matchRes = responseBody.error.message.match(/.*'(.*)'/i); - if (matchRes) { - result = matchRes.pop(); - } - } - } - return result; +function createFileFromStream(stream, name, options = {}) { + var _a, _b, _c, _d; + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? 
_d : -1, name, stream: () => toWebStream(stream()) }); } /** - * Extracts the first part of the URL, just after subscription: - * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ - * @param url - The original request url - * @returns The url prefix as explained above. + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. + * + * @param content - the content of the file as a Uint8Array in memory. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. */ -function extractSubscriptionUrl(url) { - let result; - const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); - if (matchRes && matchRes[0]) { - result = matchRes[0]; - } - else { - throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); - } - return result; -} -/** - * Registers the given provider. - * @param policy - The RPRegistrationPolicy this function is being called against. - * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ - * @param provider - The provider name to be registered. - * @param originalRequest - The original request sent by the user that returned a 409 response - * with a message that the provider is not registered. - */ -async function registerRP(policy, urlPrefix, provider, originalRequest) { - const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; - const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; - const reqOptions = getRequestEssentials(originalRequest); - reqOptions.method = "POST"; - reqOptions.url = postUrl; - const response = await policy._nextPolicy.sendRequest(reqOptions); - if (response.status !== 200) { - throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); - } - return getRegistrationStatus(policy, getUrl, originalRequest); -} -/** - * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. - * Polling will happen till the registrationState property of the response body is "Registered". - * @param policy - The RPRegistrationPolicy this function is being called against. - * @param url - The request url for polling - * @param originalRequest - The original request sent by the user that returned a 409 response - * with a message that the provider is not registered. - * @returns True if RP Registration is successful. - */ -async function getRegistrationStatus(policy, url, originalRequest) { - const reqOptions = getRequestEssentials(originalRequest); - reqOptions.url = url; - reqOptions.method = "GET"; - const res = await policy._nextPolicy.sendRequest(reqOptions); - const obj = res.parsedBody; - if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { - return true; - } - else { - await coreUtil.delay(policy._retryTimeout * 1000); - return getRegistrationStatus(policy, url, originalRequest); - } +function createFile(content, name, options = {}) { + var _a, _b, _c; + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? 
_a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream() }); } -// Copyright (c) Microsoft Corporation. -/** - * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. - * @param authenticationProvider - The authentication provider. - * @returns An instance of the {@link SigningPolicy}. - */ -function signingPolicy(authenticationProvider) { - return { - create: (nextPolicy, options) => { - return new SigningPolicy(nextPolicy, options, authenticationProvider); - }, - }; -} -/** - * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. - */ -class SigningPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, authenticationProvider) { - super(nextPolicy, options); - this.authenticationProvider = authenticationProvider; - } - signRequest(request) { - return this.authenticationProvider.signRequest(request); - } - sendRequest(request) { - return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); - } -} +exports.RestError = RestError; +exports.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy; +exports.auxiliaryAuthenticationHeaderPolicyName = auxiliaryAuthenticationHeaderPolicyName; +exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; +exports.bearerTokenAuthenticationPolicyName = bearerTokenAuthenticationPolicyName; +exports.createDefaultHttpClient = createDefaultHttpClient; +exports.createEmptyPipeline = createEmptyPipeline; +exports.createFile = createFile; +exports.createFileFromStream = createFileFromStream; +exports.createHttpHeaders = createHttpHeaders; +exports.createPipelineFromOptions = createPipelineFromOptions; +exports.createPipelineRequest = createPipelineRequest; +exports.decompressResponsePolicy = decompressResponsePolicy; +exports.decompressResponsePolicyName = decompressResponsePolicyName; +exports.defaultRetryPolicy = defaultRetryPolicy; +exports.exponentialRetryPolicy = exponentialRetryPolicy; +exports.exponentialRetryPolicyName = exponentialRetryPolicyName; +exports.formDataPolicy = formDataPolicy; +exports.formDataPolicyName = formDataPolicyName; +exports.getDefaultProxySettings = getDefaultProxySettings; +exports.isRestError = isRestError; +exports.logPolicy = logPolicy; +exports.logPolicyName = logPolicyName; +exports.multipartPolicy = multipartPolicy; +exports.multipartPolicyName = multipartPolicyName; +exports.ndJsonPolicy = ndJsonPolicy; +exports.ndJsonPolicyName = ndJsonPolicyName; +exports.proxyPolicy = proxyPolicy; +exports.proxyPolicyName = proxyPolicyName; +exports.redirectPolicy = redirectPolicy; +exports.redirectPolicyName = redirectPolicyName; +exports.retryPolicy = retryPolicy; +exports.setClientRequestIdPolicy = setClientRequestIdPolicy; +exports.setClientRequestIdPolicyName = setClientRequestIdPolicyName; +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +exports.systemErrorRetryPolicyName = systemErrorRetryPolicyName; +exports.throttlingRetryPolicy = throttlingRetryPolicy; +exports.throttlingRetryPolicyName = throttlingRetryPolicyName; +exports.tlsPolicy = tlsPolicy; +exports.tlsPolicyName = tlsPolicyName; +exports.tracingPolicy = tracingPolicy; +exports.tracingPolicyName 
= tracingPolicyName; +exports.userAgentPolicy = userAgentPolicy; +exports.userAgentPolicyName = userAgentPolicyName; +//# sourceMappingURL=index.js.map -// Copyright (c) Microsoft Corporation. -/** - * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". - * @param retryCount - Maximum number of retries. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - * @returns An instance of the {@link SystemErrorRetryPolicy} - */ -function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - return { - create: (nextPolicy, options) => { - return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); - }, - }; -} -/** - * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". - * @param retryCount - The client retry count. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - */ -class SystemErrorRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.minRetryInterval = isNumber(minRetryInterval) - ? minRetryInterval - : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) - ? maxRetryInterval - : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .catch((error) => retry(this, request, error.response, error)); - } -} -async function retry(policy, request, operationResponse, err, retryData) { - retryData = updateRetryData(policy, retryData, err); - function shouldPolicyRetry(_response, error) { - if (error && - error.code && - (error.code === "ETIMEDOUT" || - error.code === "ESOCKETTIMEDOUT" || - error.code === "ECONNREFUSED" || - error.code === "ECONNRESET" || - error.code === "ENOENT")) { - return true; - } - return false; - } - if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { - // If previous operation ended with an error and the policy allows a retry, do that - try { - await coreUtil.delay(retryData.retryInterval); - return policy._nextPolicy.sendRequest(request.clone()); - } - catch (nestedErr) { - return retry(policy, request, operationResponse, nestedErr, retryData); - } - } - else { - if (err) { - // If the operation failed in the end, return all errors instead of just the last one - return Promise.reject(retryData.error); - } - return operationResponse; - } -} -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Maximum number of retries for the throttling retry policy - */ -const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +/***/ }), -// Copyright (c) Microsoft Corporation. 
-const StatusCodes = Constants.HttpConstants.StatusCodes; -/** - * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. - * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. - * - * To learn more, please refer to - * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, - * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and - * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors - * @returns - */ -function throttlingRetryPolicy() { - return { - create: (nextPolicy, options) => { - return new ThrottlingRetryPolicy(nextPolicy, options); - }, - }; -} -const StandardAbortMessage = "The operation was aborted."; -/** - * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. - * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. - * - * To learn more, please refer to - * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, - * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and - * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors - */ -class ThrottlingRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _handleResponse) { - super(nextPolicy, options); - this.numberOfRetries = 0; - this._handleResponse = _handleResponse || this._defaultResponseHandler; - } - async sendRequest(httpRequest) { - const response = await this._nextPolicy.sendRequest(httpRequest.clone()); - if (response.status !== StatusCodes.TooManyRequests && - response.status !== StatusCodes.ServiceUnavailable) { - return response; - } - else { - return this._handleResponse(httpRequest, response); - } - } - async _defaultResponseHandler(httpRequest, httpResponse) { - var _a; - const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); - if (retryAfterHeader) { - const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); - if (delayInMs) { - this.numberOfRetries += 1; - await coreUtil.delay(delayInMs, { - abortSignal: httpRequest.abortSignal, - abortErrorMsg: StandardAbortMessage, - }); - if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw new abortController.AbortError(StandardAbortMessage); - } - if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { - return this.sendRequest(httpRequest); - } - else { - return this._nextPolicy.sendRequest(httpRequest); - } - } - } - return httpResponse; - } - static parseRetryAfterHeader(headerValue) { - const retryAfterInSeconds = Number(headerValue); - if (Number.isNaN(retryAfterInSeconds)) { - return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); - } - else { - return retryAfterInSeconds * 1000; - } - } - static parseDateRetryAfterHeader(headerValue) { - try { - const now = Date.now(); - const date = Date.parse(headerValue); - const diff = date - now; - return Number.isNaN(diff) ? 
undefined : diff; - } - catch (error) { - return undefined; - } - } -} +/***/ 51333: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var abortController = __nccwpck_require__(52557); +var crypto = __nccwpck_require__(6113); // Copyright (c) Microsoft Corporation. -const createSpan = coreTracing.createSpanFunction({ - packagePrefix: "", - namespace: "", -}); -/** - * Creates a policy that wraps outgoing requests with a tracing span. - * @param tracingOptions - Tracing options. - * @returns An instance of the {@link TracingPolicy} class. - */ -function tracingPolicy(tracingOptions = {}) { - return { - create(nextPolicy, options) { - return new TracingPolicy(nextPolicy, options, tracingOptions); - }, - }; -} +// Licensed under the MIT license. /** - * A policy that wraps outgoing requests with a tracing span. + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. */ -class TracingPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, tracingOptions) { - super(nextPolicy, options); - this.userAgent = tracingOptions.userAgent; - } - async sendRequest(request) { - if (!request.tracingContext) { - return this._nextPolicy.sendRequest(request); - } - const span = this.tryCreateSpan(request); - if (!span) { - return this._nextPolicy.sendRequest(request); - } - try { - const response = await this._nextPolicy.sendRequest(request); - this.tryProcessResponse(span, response); - return response; - } - catch (err) { - this.tryProcessError(span, err); - throw err; - } - } - tryCreateSpan(request) { - var _a; - try { - // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. - // We can pass this as a separate parameter once we upgrade to the latest core-tracing. - const { span } = createSpan(`HTTP ${request.method}`, { - tracingOptions: { - spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), - tracingContext: request.tracingContext, - }, - }); - // If the span is not recording, don't do any more work. - if (!span.isRecording()) { - span.end(); - return undefined; - } - const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? void 0 : _a.getValue(Symbol.for("az.namespace")); - if (typeof namespaceFromContext === "string") { - span.setAttribute("az.namespace", namespaceFromContext); - } - span.setAttributes({ - "http.method": request.method, - "http.url": request.url, - requestId: request.requestId, - }); - if (this.userAgent) { - span.setAttribute("http.user_agent", this.userAgent); - } - // set headers - const spanContext = span.spanContext(); - const traceParentHeader = coreTracing.getTraceParentHeader(spanContext); - if (traceParentHeader && coreTracing.isSpanContextValid(spanContext)) { - request.headers.set("traceparent", traceParentHeader); - const traceState = spanContext.traceState && spanContext.traceState.serialize(); - // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent - if (traceState) { - request.headers.set("tracestate", traceState); - } - } - return span; +function createAbortablePromise(buildPromise, options) { + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? 
options : {}; + return new Promise((resolve, reject) => { + function rejectOnAbort() { + reject(new abortController.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); } - catch (error) { - logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); - return undefined; + function removeListeners() { + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.removeEventListener("abort", onAbort); } - } - tryProcessError(span, err) { - try { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: err.message, - }); - if (err.statusCode) { - span.setAttribute("http.status_code", err.statusCode); - } - span.end(); + function onAbort() { + cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); + removeListeners(); + rejectOnAbort(); } - catch (error) { - logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + return rejectOnAbort(); } - } - tryProcessResponse(span, response) { try { - span.setAttribute("http.status_code", response.status); - const serviceRequestId = response.headers.get("x-ms-request-id"); - if (serviceRequestId) { - span.setAttribute("serviceRequestId", serviceRequestId); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.OK, + buildPromise((x) => { + removeListeners(); + resolve(x); + }, (x) => { + removeListeners(); + reject(x); }); - span.end(); } - catch (error) { - logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + catch (err) { + reject(err); } - } + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); + }); } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const StandardAbortMessage = "The delay was aborted."; /** - * ServiceClient sends service requests and receives responses. - */ -class ServiceClient { - /** - * The ServiceClient constructor - * @param credentials - The credentials used for authentication with the service. - * @param options - The service client options that govern the behavior of the client. - */ - constructor(credentials, - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ - options) { - if (!options) { - options = {}; - } - this._withCredentials = options.withCredentials || false; - this._httpClient = options.httpClient || getCachedDefaultHttpClient(); - this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); - let requestPolicyFactories; - if (Array.isArray(options.requestPolicyFactories)) { - logger.info("ServiceClient: using custom request policies"); - requestPolicyFactories = options.requestPolicyFactories; - } - else { - let authPolicyFactory = undefined; - if (coreAuth.isTokenCredential(credentials)) { - logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); - // Create a wrapped RequestPolicyFactory here so that we can provide the - // correct scope to the BearerTokenAuthenticationPolicy at the first time - // one is requested. This is needed because generated ServiceClient - // implementations do not set baseUri until after ServiceClient's constructor - // is finished, leaving baseUri empty at the time when it is needed to - // build the correct scope name. 
- const wrappedPolicyFactory = () => { - let bearerTokenPolicyFactory = undefined; - // eslint-disable-next-line @typescript-eslint/no-this-alias - const serviceClient = this; - const serviceClientOptions = options; - return { - create(nextPolicy, createOptions) { - const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); - if (!credentialScopes) { - throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); - } - if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { - bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); - } - return bearerTokenPolicyFactory.create(nextPolicy, createOptions); - }, - }; - }; - authPolicyFactory = wrappedPolicyFactory(); - } - else if (credentials && typeof credentials.signRequest === "function") { - logger.info("ServiceClient: creating signing policy from provided credentials"); - authPolicyFactory = signingPolicy(credentials); - } - else if (credentials !== undefined && credentials !== null) { - throw new Error("The credentials argument must implement the TokenCredential interface"); - } - logger.info("ServiceClient: using default request policies"); - requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); - if (options.requestPolicyFactories) { - // options.requestPolicyFactories can also be a function that manipulates - // the default requestPolicyFactories array - const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); - if (newRequestPolicyFactories) { - requestPolicyFactories = newRequestPolicyFactories; - } - } - } - this._requestPolicyFactories = requestPolicyFactories; - } - /** - * Send the provided httpRequest. - */ - sendRequest(options) { - if (options === null || options === undefined || typeof options !== "object") { - throw new Error("options cannot be null or undefined and it must be of type object."); - } - let httpRequest; - try { - if (isWebResourceLike(options)) { - options.validateRequestProperties(); - httpRequest = options; - } - else { - httpRequest = new WebResource(); - httpRequest = httpRequest.prepare(options); - } - } - catch (error) { - return Promise.reject(error); - } - let httpPipeline = this._httpClient; - if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { - for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { - httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); - } - } - return httpPipeline.sendRequest(httpRequest); - } - /** - * Send an HTTP request that is populated using the provided OperationSpec. - * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. - * @param operationSpec - The OperationSpec to use to populate the httpRequest. - * @param callback - The callback to call when the response is received. - */ - async sendOperationRequest(operationArguments, operationSpec, callback) { - var _a; - if (typeof operationArguments.options === "function") { - callback = operationArguments.options; - operationArguments.options = undefined; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; - const httpRequest = new WebResource(); - let result; - try { - const baseUri = operationSpec.baseUrl || this.baseUri; - if (!baseUri) { - throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); - } - httpRequest.method = operationSpec.httpMethod; - httpRequest.operationSpec = operationSpec; - const requestUrl = URLBuilder.parse(baseUri); - if (operationSpec.path) { - requestUrl.appendPath(operationSpec.path); - } - if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { - for (const urlParameter of operationSpec.urlParameters) { - let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); - urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); - if (!urlParameter.skipEncoding) { - urlParameterValue = encodeURIComponent(urlParameterValue); - } - requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); - } - } - if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { - for (const queryParameter of operationSpec.queryParameters) { - let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); - if (queryParameterValue !== undefined && queryParameterValue !== null) { - queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null) { - if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { - if (queryParameterValue.length === 0) { - // The collection is empty, no need to try serializing the current queryParam - continue; - } - else { - for (const index in queryParameterValue) { - const item = queryParameterValue[index]; - queryParameterValue[index] = - item === undefined || item === null ? 
"" : item.toString(); - } - } - } - else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || - queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } - } - if (!queryParameter.skipEncoding) { - if (Array.isArray(queryParameterValue)) { - for (const index in queryParameterValue) { - if (queryParameterValue[index] !== undefined && - queryParameterValue[index] !== null) { - queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); - } - } - } - else { - queryParameterValue = encodeURIComponent(queryParameterValue); - } - } - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } - requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); - } - } - } - httpRequest.url = requestUrl.toString(); - const contentType = operationSpec.contentType || this.requestContentType; - if (contentType && operationSpec.requestBody) { - httpRequest.headers.set("Content-Type", contentType); - } - if (operationSpec.headerParameters) { - for (const headerParameter of operationSpec.headerParameters) { - let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); - if (headerValue !== undefined && headerValue !== null) { - headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); - const headerCollectionPrefix = headerParameter.mapper - .headerCollectionPrefix; - if (headerCollectionPrefix) { - for (const key of Object.keys(headerValue)) { - httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); - } - } - else { - httpRequest.headers.set(headerParameter.mapper.serializedName || - getPathStringFromParameter(headerParameter), headerValue); - } - } - } - } - const options = operationArguments.options; - if (options) { - if (options.customHeaders) { - for (const customHeaderName in options.customHeaders) { - httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); - } - } - if (options.abortSignal) { - httpRequest.abortSignal = options.abortSignal; - } - if (options.timeout) { - httpRequest.timeout = options.timeout; - } - if (options.onUploadProgress) { - httpRequest.onUploadProgress = options.onUploadProgress; - } - if (options.onDownloadProgress) { - httpRequest.onDownloadProgress = options.onDownloadProgress; - } - if (options.spanOptions) { - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
- httpRequest.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - httpRequest.tracingContext = options.tracingContext; - } - if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { - httpRequest.shouldDeserialize = options.shouldDeserialize; - } - } - httpRequest.withCredentials = this._withCredentials; - serializeRequestBody(this, httpRequest, operationArguments, operationSpec); - if (httpRequest.streamResponseStatusCodes === undefined) { - httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); - } - let rawResponse; - let sendRequestError; - try { - rawResponse = await this.sendRequest(httpRequest); - } - catch (error) { - sendRequestError = error; - } - if (sendRequestError) { - if (sendRequestError.response) { - sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || - operationSpec.responses["default"]); - } - result = Promise.reject(sendRequestError); - } - else { - result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); - } - } - catch (error) { - result = Promise.reject(error); - } - const cb = callback; - if (cb) { - result - .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) - .catch((err) => cb(err)); - } - return result; - } -} -function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { - var _a, _b, _c, _d, _e, _f; - const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; - const updatedOptions = { - rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", - includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, - xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, - }; - const xmlCharKey = serializerOptions.xmlCharKey; - if (operationSpec.requestBody && operationSpec.requestBody.mapper) { - httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); - const bodyMapper = operationSpec.requestBody.mapper; - const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; - const typeName = bodyMapper.type.name; - try { - if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { - const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); - httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); - const isStream = typeName === MapperType.Stream; - if (operationSpec.isXML) { - const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; - const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); - if (typeName === MapperType.Sequence) { - httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { - rootName: xmlName || serializedName, - xmlCharKey, - }); - } - else if (!isStream) { - httpRequest.body = stringifyXML(value, { - rootName: xmlName || serializedName, - xmlCharKey, - }); - } - } - else if (typeName === MapperType.String && - (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { - // the String serializer has validated that request body is a string - // so just send the string. - return; - } - else if (!isStream) { - httpRequest.body = JSON.stringify(httpRequest.body); - } - } - } - catch (error) { - throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); - } - } - else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { - httpRequest.formData = {}; - for (const formDataParameter of operationSpec.formDataParameters) { - const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); - if (formDataParameterValue !== undefined && formDataParameterValue !== null) { - const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); - httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); - } - } - } -} -/** - * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself - */ -function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { - // Composite and Sequence schemas already got their root namespace set during serialization - // We just need to add xmlns to the other schema types - if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { - const result = {}; - result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; - return result; - } - return serializedValue; -} -function getValueOrFunctionResult(value, defaultValueCreator) { - let result; - if (typeof value === "string") { - result = value; - } - else { - result = defaultValueCreator(); - if (typeof value === "function") { - result = value(result); - } - } - return result; -} -function createDefaultRequestPolicyFactories(authPolicyFactory, options) { - const factories = []; - if (options.generateClientRequestIdHeader) { - factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); - } - if (authPolicyFactory) { - factories.push(authPolicyFactory); - } - const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); - const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); - if (userAgentHeaderName && userAgentHeaderValue) { - factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); - } - factories.push(redirectPolicy()); - factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); - if (!options.noRetryPolicy) { - factories.push(exponentialRetryPolicy()); - factories.push(systemErrorRetryPolicy()); - factories.push(throttlingRetryPolicy()); - } - factories.push(deserializationPolicy(options.deserializationContentTypes)); - if (coreUtil.isNode) { - factories.push(proxyPolicy(options.proxySettings)); - } - factories.push(logPolicy({ logger: logger.info })); - return factories; -} -/** - * Creates an HTTP pipeline based on the given options. - * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. 
- * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. - * @returns A set of options that can be passed to create a new {@link ServiceClient}. + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs */ -function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { - const requestPolicyFactories = []; - if (pipelineOptions.sendStreamingJson) { - requestPolicyFactories.push(ndJsonPolicy()); - } - let userAgentValue = undefined; - if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { - const userAgentInfo = []; - userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); - // Add the default user agent value if it isn't already specified - // by the userAgentPrefix option. - const defaultUserAgentInfo = getDefaultUserAgentValue(); - if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { - userAgentInfo.push(defaultUserAgentInfo); - } - userAgentValue = userAgentInfo.join(" "); - } - const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); - const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); - const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); - if (coreUtil.isNode) { - requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); - } - const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); - const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); - requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); - if (redirectOptions.handleRedirects) { - requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); - } - if (authPolicyFactory) { - requestPolicyFactories.push(authPolicyFactory); - } - requestPolicyFactories.push(logPolicy(loggingOptions)); - if (coreUtil.isNode && pipelineOptions.decompressResponse === false) { - requestPolicyFactories.push(disableResponseDecompressionPolicy()); - } - return { - httpClient: pipelineOptions.httpClient, - requestPolicyFactories, - }; -} -function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { - return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); -} -function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { - var _a; - let value; - if (typeof parameterPath === "string") { - parameterPath = [parameterPath]; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; - if (Array.isArray(parameterPath)) { - if (parameterPath.length > 0) { - if (parameterMapper.isConstant) { - value = parameterMapper.defaultValue; - } - else { - let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); - if (!propertySearchResult.propertyFound) { - propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); - } - let useDefaultValue = false; - if (!propertySearchResult.propertyFound) { - useDefaultValue = - parameterMapper.required || - (parameterPath[0] === "options" && parameterPath.length === 2); - } - value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; - } - // Serialize just for validation purposes. - const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); - serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); - } - } - else { - if (parameterMapper.required) { - value = {}; - } - for (const propertyName in parameterPath) { - const propertyMapper = parameterMapper.type.modelProperties[propertyName]; - const propertyPath = parameterPath[propertyName]; - const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); - // Serialize just for validation purposes. - const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); - serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); - if (propertyValue !== undefined && propertyValue !== null) { - if (!value) { - value = {}; - } - value[propertyName] = propertyValue; - } - } - } - return value; -} -function getPropertyFromParameterPath(parent, parameterPath) { - const result = { propertyFound: false }; - let i = 0; - for (; i < parameterPath.length; ++i) { - const parameterPathPart = parameterPath[i]; - // Make sure to check inherited properties too, so don't use hasOwnProperty(). - if (parent !== undefined && parent !== null && parameterPathPart in parent) { - parent = parent[parameterPathPart]; - } - else { - break; - } - } - if (i === parameterPath.length) { - result.propertyValue = parent; - result.propertyFound = true; - } - return result; +function delay(timeInMs, options) { + let token; + const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; + return createAbortablePromise((resolve) => { + token = setTimeout(resolve, timeInMs); + }, { + cleanupBeforeAbort: () => clearTimeout(token), + abortSignal, + abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage, + }); } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). - * @param _response - Wrapper object for http response. - * @param responseSpec - Mappers for how to parse the response properties. - * @returns - A normalized response object. + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. 
*/ -function flattenResponse(_response, responseSpec) { - const parsedHeaders = _response.parsedHeaders; - const bodyMapper = responseSpec && responseSpec.bodyMapper; - const addOperationResponse = (obj) => { - return Object.defineProperty(obj, "_response", { - value: _response, - }); - }; - if (bodyMapper) { - const typeName = bodyMapper.type.name; - if (typeName === "Stream") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); - } - const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; - const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); - if (typeName === "Sequence" || isPageableResponse) { - const arrayResponse = [...(_response.parsedBody || [])]; - for (const key of Object.keys(modelProperties)) { - if (modelProperties[key].serializedName) { - arrayResponse[key] = _response.parsedBody[key]; - } - } - if (parsedHeaders) { - for (const key of Object.keys(parsedHeaders)) { - arrayResponse[key] = parsedHeaders[key]; - } - } - addOperationResponse(arrayResponse); - return arrayResponse; - } - if (typeName === "Composite" || typeName === "Dictionary") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); - } - } - if (bodyMapper || - _response.request.method === "HEAD" || - isPrimitiveType(_response.parsedBody)) { - // primitive body types and HEAD booleans - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); +async function cancelablePromiseRace(abortablePromiseBuilders, options) { + var _a, _b; + const aborter = new abortController.AbortController(); + function abortHandler() { + aborter.abort(); } - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); -} -function getCredentialScopes(options, baseUri) { - if (options === null || options === void 0 ? void 0 : options.credentialScopes) { - return options.credentialScopes; + (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); + try { + return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); } - if (baseUri) { - return `${baseUri}/.default`; + finally { + aborter.abort(); + (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler); } - return undefined; -} - -// Copyright (c) Microsoft Corporation. -/** - * This function is only here for compatibility. Use createSpanFunction in core-tracing. - * - * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. - * @hidden - - * @param spanConfig - The name of the operation being performed. - * @param tracingOptions - The options for the underlying http request. - */ -function createSpanFunction(args) { - return coreTracing.createSpanFunction(args); } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Defines the default token refresh buffer duration. - */ -const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes -/** - * Provides an {@link AccessTokenCache} implementation which clears - * the cached {@link AccessToken}'s after the expiresOnTimestamp has - * passed. 
- * - * @deprecated No longer used in the bearer authorization policy. + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. */ -class ExpiringAccessTokenCache { - /** - * Constructs an instance of {@link ExpiringAccessTokenCache} with - * an optional expiration buffer time. - */ - constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { - this.cachedToken = undefined; - this.tokenRefreshBufferMs = tokenRefreshBufferMs; - } - /** - * Saves an access token into the internal in-memory cache. - * @param accessToken - Access token or undefined to clear the cache. - */ - setCachedToken(accessToken) { - this.cachedToken = accessToken; - } - /** - * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. - */ - getCachedToken() { - if (this.cachedToken && - Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { - this.cachedToken = undefined; - } - return this.cachedToken; - } +function getRandomIntegerInclusive(min, max) { + // Make sure inputs are integers. + min = Math.ceil(min); + max = Math.floor(max); + // Pick a random offset from zero to the size of the range. + // Since Math.random() can never return 1, we have to make the range one larger + // in order to be inclusive of the maximum value after we take the floor. + const offset = Math.floor(Math.random() * (max - min + 1)); + return offset + min; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. - * - * @deprecated No longer used in the bearer authorization policy. + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. */ -class AccessTokenRefresher { - constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { - this.credential = credential; - this.scopes = scopes; - this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; - this.lastCalled = 0; - } - /** - * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying - * that we are ready for a new refresh. - */ - isReady() { - // We're only ready for a new refresh if the required milliseconds have passed. - return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); - } - /** - * Stores the time in which it is called, - * then requests a new token, - * then sets this.promise to undefined, - * then returns the token. - */ - async getToken(options) { - this.lastCalled = Date.now(); - const token = await this.credential.getToken(this.scopes, options); - this.promise = undefined; - return token || undefined; - } - /** - * Requests a new token if we're not currently waiting for a new token. - * Returns null if the required time between each call hasn't been reached. 
- */ - refresh(options) { - if (!this.promise) { - this.promise = this.getToken(options); - } - return this.promise; - } +function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); } // Copyright (c) Microsoft Corporation. -const HeaderConstants = Constants.HeaderConstants; -const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +// Licensed under the MIT license. /** - * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. */ -class BasicAuthenticationCredentials { - /** - * Creates a new BasicAuthenticationCredentials object. - * - * @param userName - User name. - * @param password - Password. - * @param authorizationScheme - The authorization scheme. - */ - constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { - /** - * Authorization scheme. Defaults to "Basic". - * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes - */ - this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; - if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { - throw new Error("userName cannot be null or undefined and must be of type string."); - } - if (password === null || password === undefined || typeof password.valueOf() !== "string") { - throw new Error("password cannot be null or undefined and must be of type string."); - } - this.userName = userName; - this.password = password; - this.authorizationScheme = authorizationScheme; - } - /** - * Signs a request with the Authentication header. - * - * @param webResource - The WebResourceLike to be signed. - * @returns The signed request object. - */ - signRequest(webResource) { - const credentials = `${this.userName}:${this.password}`; - const encodedCredentials = `${this.authorizationScheme} ${encodeString(credentials)}`; - if (!webResource.headers) - webResource.headers = new HttpHeaders(); - webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); - return Promise.resolve(webResource); +function isError(e) { + if (isObject(e)) { + const hasName = typeof e.name === "string"; + const hasMessage = typeof e.message === "string"; + return hasName && hasMessage; } + return false; } - -// Copyright (c) Microsoft Corporation. /** - * Authenticates to a service using an API key. + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. + * @param e - Something thrown from a try block + * @returns The error message or a string of the input */ -class ApiKeyCredentials { - /** - * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. - */ - constructor(options) { - if (!options || (options && !options.inHeader && !options.inQuery)) { - throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); - } - this.inHeader = options.inHeader; - this.inQuery = options.inQuery; +function getErrorMessage(e) { + if (isError(e)) { + return e.message; } - /** - * Signs a request with the values provided in the inHeader and inQuery parameter. - * - * @param webResource - The WebResourceLike to be signed. 
- * @returns The signed request object. - */ - signRequest(webResource) { - if (!webResource) { - return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); - } - if (this.inHeader) { - if (!webResource.headers) { - webResource.headers = new HttpHeaders(); + else { + let stringified; + try { + if (typeof e === "object" && e) { + stringified = JSON.stringify(e); } - for (const headerName in this.inHeader) { - webResource.headers.set(headerName, this.inHeader[headerName]); + else { + stringified = String(e); } } - if (this.inQuery) { - if (!webResource.url) { - return Promise.reject(new Error(`url cannot be null in the request object.`)); - } - if (webResource.url.indexOf("?") < 0) { - webResource.url += "?"; - } - for (const key in this.inQuery) { - if (!webResource.url.endsWith("?")) { - webResource.url += "&"; - } - webResource.url += `${key}=${this.inQuery[key]}`; - } + catch (err) { + stringified = "[unable to stringify input]"; } - return Promise.resolve(webResource); + return `Unknown error ${stringified}`; } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * A {@link TopicCredentials} object used for Azure Event Grid. + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. */ -class TopicCredentials extends ApiKeyCredentials { - /** - * Creates a new EventGrid TopicCredentials object. - * - * @param topicKey - The EventGrid topic key - */ - constructor(topicKey) { - if (!topicKey || (topicKey && typeof topicKey !== "string")) { - throw new Error("topicKey cannot be null or undefined and must be of type string."); - } - const options = { - inHeader: { - "aeg-sas-key": topicKey, - }, - }; - super(options); - } +async function computeSha256Hmac(key, stringToSign, encoding) { + const decodedKey = Buffer.from(key, "base64"); + return crypto.createHmac("sha256", decodedKey).update(stringToSign).digest(encoding); } - -Object.defineProperty(exports, "delay", ({ - enumerable: true, - get: function () { return coreUtil.delay; } -})); -Object.defineProperty(exports, "isNode", ({ - enumerable: true, - get: function () { return coreUtil.isNode; } -})); -Object.defineProperty(exports, "isTokenCredential", ({ - enumerable: true, - get: function () { return coreAuth.isTokenCredential; } -})); -exports.AccessTokenRefresher = AccessTokenRefresher; -exports.ApiKeyCredentials = ApiKeyCredentials; -exports.BaseRequestPolicy = BaseRequestPolicy; -exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; -exports.Constants = Constants; -exports.DefaultHttpClient = NodeFetchHttpClient; -exports.ExpiringAccessTokenCache = ExpiringAccessTokenCache; -exports.HttpHeaders = HttpHeaders; -exports.MapperType = MapperType; -exports.RequestPolicyOptions = RequestPolicyOptions; -exports.RestError = RestError; -exports.Serializer = Serializer; -exports.ServiceClient = ServiceClient; -exports.TopicCredentials = TopicCredentials; -exports.URLBuilder = URLBuilder; -exports.URLQuery = URLQuery; -exports.WebResource = WebResource; -exports.XML_ATTRKEY = XML_ATTRKEY; -exports.XML_CHARKEY = XML_CHARKEY; -exports.applyMixins = applyMixins; -exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; -exports.createPipelineFromOptions = createPipelineFromOptions; -exports.createSpanFunction = 
createSpanFunction; -exports.deserializationPolicy = deserializationPolicy; -exports.deserializeResponseBody = deserializeResponseBody; -exports.disableResponseDecompressionPolicy = disableResponseDecompressionPolicy; -exports.encodeUri = encodeUri; -exports.executePromisesSequentially = executePromisesSequentially; -exports.exponentialRetryPolicy = exponentialRetryPolicy; -exports.flattenResponse = flattenResponse; -exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; -exports.generateUuid = generateUuid; -exports.getDefaultProxySettings = getDefaultProxySettings; -exports.getDefaultUserAgentValue = getDefaultUserAgentValue; -exports.isDuration = isDuration; -exports.isValidUuid = isValidUuid; -exports.keepAlivePolicy = keepAlivePolicy; -exports.logPolicy = logPolicy; -exports.operationOptionsToRequestOptionsBase = operationOptionsToRequestOptionsBase; -exports.parseXML = parseXML; -exports.promiseToCallback = promiseToCallback; -exports.promiseToServiceCallback = promiseToServiceCallback; -exports.proxyPolicy = proxyPolicy; -exports.redirectPolicy = redirectPolicy; -exports.serializeObject = serializeObject; -exports.signingPolicy = signingPolicy; -exports.stringifyXML = stringifyXML; -exports.stripRequest = stripRequest; -exports.stripResponse = stripResponse; -exports.systemErrorRetryPolicy = systemErrorRetryPolicy; -exports.throttlingRetryPolicy = throttlingRetryPolicy; -exports.tracingPolicy = tracingPolicy; -exports.userAgentPolicy = userAgentPolicy; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9067: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * A static-key-based credential that supports updating - * the underlying key value. + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. */ -class AzureKeyCredential { - /** - * Create an instance of an AzureKeyCredential for use - * with a service client. - * - * @param key - The initial value of the key to use in authentication - */ - constructor(key) { - if (!key) { - throw new Error("key must be a non-empty string"); - } - this._key = key; - } - /** - * The value of the key to be used in authentication - */ - get key() { - return this._key; - } - /** - * Change the value of the key. - * - * Updates will take effect upon the next request after - * updating the key value. - * - * @param newKey - The new key value to be used - */ - update(newKey) { - this._key = newKey; - } +async function computeSha256Hash(content, encoding) { + return crypto.createHash("sha256").update(content).digest(encoding); } // Copyright (c) Microsoft Corporation. @@ -51208,17 +48699,14 @@ class AzureKeyCredential { /** * Helper TypeGuard that checks if something is defined or not. * @param thing - Anything - * @internal */ function isDefined(thing) { return typeof thing !== "undefined" && thing !== null; } /** * Helper TypeGuard that checks if the input is an object with the specified properties. - * Note: The properties may be inherited. * @param thing - Anything. * @param properties - The name of the properties that should appear in the object. 
- * @internal */ function isObjectWithProperties(thing, properties) { if (!isDefined(thing) || typeof thing !== "object") { @@ -51233,1736 +48721,2191 @@ function isObjectWithProperties(thing, properties) { } /** * Helper TypeGuard that checks if the input is an object with the specified property. - * Note: The property may be inherited. * @param thing - Any object. * @param property - The name of the property that should appear in the object. - * @internal */ function objectHasProperty(thing, property) { - return typeof thing === "object" && property in thing; + return (isDefined(thing) && typeof thing === "object" && property in thing); } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* + * NOTE: When moving this file, please update "react-native" section in package.json. + */ /** - * A static name/key-based credential that supports updating - * the underlying name and key values. + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. */ -class AzureNamedKeyCredential { - /** - * Create an instance of an AzureNamedKeyCredential for use - * with a service client. - * - * @param name - The initial value of the name to use in authentication. - * @param key - The initial value of the key to use in authentication. - */ - constructor(name, key) { - if (!name || !key) { - throw new TypeError("name and key must be non-empty strings"); +function generateUUID() { + let uuid = ""; + for (let i = 0; i < 32; i++) { + // Generate a random number between 0 and 15 + const randomNumber = Math.floor(Math.random() * 16); + // Set the UUID version to 4 in the 13th position + if (i === 12) { + uuid += "4"; } - this._name = name; - this._key = key; - } - /** - * The value of the key to be used in authentication. - */ - get key() { - return this._key; - } - /** - * The value of the name to be used in authentication. - */ - get name() { - return this._name; - } - /** - * Change the value of the key. - * - * Updates will take effect upon the next request after - * updating the key value. - * - * @param newName - The new name value to be used. - * @param newKey - The new key value to be used. - */ - update(newName, newKey) { - if (!newName || !newKey) { - throw new TypeError("newName and newKey must be non-empty strings"); + else if (i === 16) { + // Set the UUID variant to "10" in the 17th position + uuid += (randomNumber & 0x3) | 0x8; + } + else { + // Add a random hexadecimal digit to the UUID string + uuid += randomNumber.toString(16); + } + // Add hyphens to the UUID string at the appropriate positions + if (i === 7 || i === 11 || i === 15 || i === 19) { + uuid += "-"; } - this._name = newName; - this._key = newKey; } + return uuid; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var _a$1; +// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. +let uuidFunction = typeof ((_a$1 = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a$1 === void 0 ? void 0 : _a$1.randomUUID) === "function" + ? globalThis.crypto.randomUUID.bind(globalThis.crypto) + : crypto.randomUUID; +// Not defined in earlier versions of Node.js 14 +if (!uuidFunction) { + uuidFunction = generateUUID; } /** - * Tests an object to determine whether it implements NamedKeyCredential. + * Generated Universally Unique Identifier * - * @param credential - The assumed NamedKeyCredential to be tested. + * @returns RFC4122 v4 UUID. 
*/ -function isNamedKeyCredential(credential) { - return (isObjectWithProperties(credential, ["name", "key"]) && - typeof credential.key === "string" && - typeof credential.name === "string"); +function randomUUID() { + return uuidFunction(); } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var _a, _b, _c, _d; /** - * A static-signature-based credential that supports updating - * the underlying signature value. + * A constant that indicates whether the environment the code is running is a Web Browser. */ -class AzureSASCredential { - /** - * Create an instance of an AzureSASCredential for use - * with a service client. - * - * @param signature - The initial value of the shared access signature to use in authentication - */ - constructor(signature) { - if (!signature) { - throw new Error("shared access signature must be a non-empty string"); - } - this._signature = signature; - } - /** - * The value of the shared access signature to be used in authentication - */ - get signature() { - return this._signature; - } - /** - * Change the value of the signature. - * - * Updates will take effect upon the next request after - * updating the signature value. - * - * @param newSignature - The new shared access signature value to be used - */ - update(newSignature) { - if (!newSignature) { - throw new Error("shared access signature must be a non-empty string"); - } - this._signature = newSignature; - } -} +// eslint-disable-next-line @azure/azure-sdk/ts-no-window +const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; /** - * Tests an object to determine whether it implements SASCredential. - * - * @param credential - The assumed SASCredential to be tested. + * A constant that indicates whether the environment the code is running is a Web Worker. */ -function isSASCredential(credential) { - return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); -} +const isWebWorker = typeof self === "object" && + typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && + (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || + ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || + ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +const isDeno = typeof Deno !== "undefined" && + typeof Deno.version !== "undefined" && + typeof Deno.version.deno !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +const isNode = typeof process !== "undefined" && + Boolean(process.version) && + Boolean((_d = process.versions) === null || _d === void 0 ? void 0 : _d.node) && + // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions + !isDeno; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is in React-Native. 
+ */ +// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js +const isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Tests an object to determine whether it implements TokenCredential. - * - * @param credential - The assumed TokenCredential to be tested. + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string */ -function isTokenCredential(credential) { - // Check for an object with a 'getToken' function and possibly with - // a 'signRequest' function. We do this check to make sure that - // a ServiceClientCredentials implementor (like TokenClientCredentials - // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if - // it doesn't actually implement TokenCredential also. - const castCredential = credential; - return (castCredential && - typeof castCredential.getToken === "function" && - (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); -} +function uint8ArrayToString(bytes, format) { + return Buffer.from(bytes).toString(format); +} +/** + * The helper that transforms string to specific character encoded bytes array. + * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +function stringToUint8Array(value, format) { + return Buffer.from(value, format); +} -exports.AzureKeyCredential = AzureKeyCredential; -exports.AzureNamedKeyCredential = AzureNamedKeyCredential; -exports.AzureSASCredential = AzureSASCredential; -exports.isNamedKeyCredential = isNamedKeyCredential; -exports.isSASCredential = isSASCredential; -exports.isTokenCredential = isTokenCredential; +exports.cancelablePromiseRace = cancelablePromiseRace; +exports.computeSha256Hash = computeSha256Hash; +exports.computeSha256Hmac = computeSha256Hmac; +exports.createAbortablePromise = createAbortablePromise; +exports.delay = delay; +exports.getErrorMessage = getErrorMessage; +exports.getRandomIntegerInclusive = getRandomIntegerInclusive; +exports.isBrowser = isBrowser; +exports.isBun = isBun; +exports.isDefined = isDefined; +exports.isDeno = isDeno; +exports.isError = isError; +exports.isNode = isNode; +exports.isObject = isObject; +exports.isObjectWithProperties = isObjectWithProperties; +exports.isReactNative = isReactNative; +exports.isWebWorker = isWebWorker; +exports.objectHasProperty = objectHasProperty; +exports.randomUUID = randomUUID; +exports.stringToUint8Array = stringToUint8Array; +exports.uint8ArrayToString = uint8ArrayToString; //# sourceMappingURL=index.js.map /***/ }), -/***/ 31754: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 33608: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var api = __nccwpck_require__(65163); +const validator = __nccwpck_require__(11826); +const XMLParser = __nccwpck_require__(86873); +const XMLBuilder = __nccwpck_require__(17974); -// Copyright (c) Microsoft Corporation. -(function (SpanKind) { - /** Default value. Indicates that the span is used internally. 
*/ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; - /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. - */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; - /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. - */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; - /** - * Indicates that the span describes producer sending a message to a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; - /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(exports.SpanKind || (exports.SpanKind = {})); -/** - * Return the span if one exists - * - * @param context - context to get span from - */ -function getSpan(context) { - return api.trace.getSpan(context); -} -/** - * Set the span on a context - * - * @param context - context to use as parent - * @param span - span to set active - */ -function setSpan(context, span) { - return api.trace.setSpan(context, span); -} -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context - context to set active span on - * @param spanContext - span context to be wrapped - */ -function setSpanContext(context, spanContext) { - return api.trace.setSpanContext(context, spanContext); -} -/** - * Get the span context of the span if it exists. - * - * @param context - context to get values from - */ -function getSpanContext(context) { - return api.trace.getSpanContext(context); -} -/** - * Returns true of the given {@link SpanContext} is valid. - * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. - * - * @param context - the {@link SpanContext} to validate. - * - * @returns true if the {@link SpanContext} is valid, false otherwise. - */ -function isSpanContextValid(context) { - return api.trace.isSpanContextValid(context); -} -function getTracer(name, version) { - return api.trace.getTracer(name || "azure/core-tracing", version); +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder } -/** Entrypoint for context API */ -const context = api.context; -(function (SpanStatusCode) { - /** - * The default status. - */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; - /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. - */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; - /** - * The operation contains an error. - */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(exports.SpanStatusCode || (exports.SpanStatusCode = {})); -// Copyright (c) Microsoft Corporation. -function isTracingDisabled() { - var _a; - if (typeof process === "undefined") { - // not supported in browser for now without polyfills - return false; - } - const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? 
void 0 : _a.toLowerCase(); - if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { - return false; +/***/ }), + +/***/ 81133: +/***/ ((module) => { + +function getIgnoreAttributesFn(ignoreAttributes) { + if (typeof ignoreAttributes === 'function') { + return ignoreAttributes } - return Boolean(azureTracingDisabledValue); -} -/** - * Creates a function that can be used to create spans using the global tracer. - * - * Usage: - * - * ```typescript - * // once - * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); - * - * // in each operation - * const span = createSpan("deleteConfigurationSetting", operationOptions); - * // code... - * span.end(); - * ``` - * - * @hidden - * @param args - allows configuration of the prefix for each span as well as the az.namespace field. - */ -function createSpanFunction(args) { - return function (operationName, operationOptions) { - const tracer = getTracer(); - const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; - const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); - const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; - let span; - if (isTracingDisabled()) { - span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); - } - else { - span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); - } - if (args.namespace) { - span.setAttribute("az.namespace", args.namespace); - } - let newSpanOptions = tracingOptions.spanOptions || {}; - if (span.isRecording() && args.namespace) { - newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + if (Array.isArray(ignoreAttributes)) { + return (attrName) => { + for (const pattern of ignoreAttributes) { + if (typeof pattern === 'string' && attrName === pattern) { + return true + } + if (pattern instanceof RegExp && pattern.test(attrName)) { + return true + } + } } - const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); - const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); - return { - span, - updatedOptions: newOperationOptions - }; - }; + } + return () => false } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const VERSION = "00"; -/** - * Generates a `SpanContext` given a `traceparent` header value. - * @param traceParent - Serialized span context data as a `traceparent` header value. - * @returns The `SpanContext` generated from the `traceparent` value. 
- */ -function extractSpanContextFromTraceParentHeader(traceParentHeader) { - const parts = traceParentHeader.split("-"); - if (parts.length !== 4) { - return; - } - const [version, traceId, spanId, traceOptions] = parts; - if (version !== VERSION) { - return; +module.exports = getIgnoreAttributesFn + +/***/ }), + +/***/ 96760: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); } - const traceFlags = parseInt(traceOptions, 16); - const spanContext = { - spanId, - traceId, - traceFlags - }; - return spanContext; -} + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + /** - * Generates a `traceparent` value given a span context. - * @param spanContext - Contains context for a specific span. - * @returns The `spanContext` represented as a `traceparent` value. + * Copy all the properties of a into b. + * @param {*} target + * @param {*} a */ -function getTraceParentHeader(spanContext) { - const missingFields = []; - if (!spanContext.traceId) { - missingFields.push("traceId"); - } - if (!spanContext.spanId) { - missingFields.push("spanId"); - } - if (missingFields.length) { - return; +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } } - const flags = spanContext.traceFlags || 0 /* NONE */; - const hexFlags = flags.toString(16); - const traceFlags = hexFlags.length === 1 ? 
`0${hexFlags}` : hexFlags; - // https://www.w3.org/TR/trace-context/#traceparent-header-field-values - return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; -} + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ -exports.context = context; -exports.createSpanFunction = createSpanFunction; -exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; -exports.getSpan = getSpan; -exports.getSpanContext = getSpanContext; -exports.getTraceParentHeader = getTraceParentHeader; -exports.getTracer = getTracer; -exports.isSpanContextValid = isSpanContextValid; -exports.setSpan = setSpan; -exports.setSpanContext = setSpanContext; -//# sourceMappingURL=index.js.map +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; /***/ }), -/***/ 24918: +/***/ 11826: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); +const util = __nccwpck_require__(96760); -var abortController = __nccwpck_require__(52557); -var crypto = __nccwpck_require__(6113); +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -var _a$1; -/** - * A constant that indicates whether the environment the code is running is Node.JS. - */ -const isNode = typeof process !== "undefined" && Boolean(process.version) && Boolean((_a$1 = process.versions) === null || _a$1 === void 0 ? void 0 : _a$1.node); +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); -// Copyright (c) Microsoft Corporation. -/** - * Creates an abortable promise. - * @param buildPromise - A function that takes the resolve and reject functions as parameters. - * @param options - The options for the abortable promise. - * @returns A promise that can be aborted. - */ -function createAbortablePromise(buildPromise, options) { - const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; - return new Promise((resolve, reject) => { - function rejectOnAbort() { - reject(new abortController.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. 
depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; } - function removeListeners() { - abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.removeEventListener("abort", onAbort); + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; } - function onAbort() { - cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); - removeListeners(); - rejectOnAbort(); + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; } - if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { - return rejectOnAbort(); + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); } - try { - buildPromise((x) => { - removeListeners(); - resolve(x); - }, (x) => { - removeListeners(); - reject(x); - }); + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); } - catch (err) { - reject(err); + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", 
getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; } - abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); - }); -} -// Copyright (c) Microsoft Corporation. -const StandardAbortMessage = "The delay was aborted."; -/** - * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. - * @param timeInMs - The number of milliseconds to be delayed. - * @param options - The options for delay - currently abort options - * @returns Promise that is resolved after timeInMs - */ -function delay(timeInMs, options) { - let token; - const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; - return createAbortablePromise((resolve) => { - token = setTimeout(resolve, timeInMs); - }, { - cleanupBeforeAbort: () => clearTimeout(token), - abortSignal, - abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage, - }); -} + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
+ if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} /** - * Returns a random integer value between a lower and upper bound, - * inclusive of both bounds. - * Note that this uses Math.random and isn't secure. If you need to use - * this for any kind of security purpose, find a better source of random. - * @param min - The smallest integer value allowed. - * @param max - The largest integer value allowed. + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i */ -function getRandomIntegerInclusive(min, max) { - // Make sure inputs are integers. - min = Math.ceil(min); - max = Math.floor(max); - // Pick a random offset from zero to the size of the range. - // Since Math.random() can never return 1, we have to make the range one larger - // in order to be inclusive of the maximum value after we take the floor. - const offset = Math.floor(Math.random() * (max - min + 1)); - return offset + min; +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' && xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Helper to determine when an input is a generic JS object. - * @returns true when input is an object type that is not null, Array, RegExp, or Date. 
- */ -function isObject(input) { - return (typeof input === "object" && - input !== null && - !Array.isArray(input) && - !(input instanceof RegExp) && - !(input instanceof Date)); +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; } -// Copyright (c) Microsoft Corporation. +const doubleQuote = '"'; +const singleQuote = "'"; + /** - * Typeguard for an error object shape (has name and message) - * @param e - Something caught by a catch clause. + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i */ -function isError(e) { - if (isObject(e)) { - const hasName = typeof e.name === "string"; - const hasMessage = typeof e.message === "string"; - return hasName && hasMessage; +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } } + attrStr += xmlData[i]; + } + if (startChar !== '') { return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; } + /** - * Given what is thought to be an error object, return the message if possible. - * If the message is missing, returns a stringified version of the input. - * @param e - Something thrown from a try block - * @returns The error message or a string of the input + * Select all the attributes whether valid or invalid. 
*/ -function getErrorMessage(e) { - if (isError(e)) { - return e.message; +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); } - else { - let stringified; - try { - if (typeof e === "object" && e) { - stringified = JSON.stringify(e); - } - else { - stringified = String(e); - } - } - catch (err) { - stringified = "[unable to stringify input]"; - } - return `Unknown error ${stringified}`; + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; } -// Copyright (c) Microsoft Corporation. -/** - * Generates a SHA-256 HMAC signature. - * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. - * @param stringToSign - The data to be signed. - * @param encoding - The textual encoding to use for the returned HMAC digest. - */ -async function computeSha256Hmac(key, stringToSign, encoding) { - const decodedKey = Buffer.from(key, "base64"); - return crypto.createHmac("sha256", decodedKey).update(stringToSign).digest(encoding); +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; } -/** - * Generates a SHA-256 hash. - * @param content - The data to be included in the hash. - * @param encoding - The textual encoding to use for the returned hash. 
- */ -async function computeSha256Hash(content, encoding) { - return crypto.createHash("sha256").update(content).digest(encoding); + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Helper TypeGuard that checks if something is defined or not. - * @param thing - Anything - */ -function isDefined(thing) { - return typeof thing !== "undefined" && thing !== null; +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; } -/** - * Helper TypeGuard that checks if the input is an object with the specified properties. - * @param thing - Anything. - * @param properties - The name of the properties that should appear in the object. - */ -function isObjectWithProperties(thing, properties) { - if (!isDefined(thing) || typeof thing !== "object") { - return false; - } - for (const property of properties) { - if (!objectHasProperty(thing, property)) { - return false; - } - } - return true; + +function validateAttrName(attrName) { + return util.isName(attrName); } -/** - * Helper TypeGuard that checks if the input is an object with the specified property. - * @param thing - Any object. - * @param property - The name of the property that should appear in the object. - */ -function objectHasProperty(thing, property) { - return (isDefined(thing) && typeof thing === "object" && property in thing); + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/* - * NOTE: When moving this file, please update "react-native" section in package.json. - */ -/** - * Generated Universally Unique Identifier - * - * @returns RFC4122 v4 UUID. - */ -function generateUUID() { - let uuid = ""; - for (let i = 0; i < 32; i++) { - // Generate a random number between 0 and 15 - const randomNumber = Math.floor(Math.random() * 16); - // Set the UUID version to 4 in the 13th position - if (i === 12) { - uuid += "4"; - } - else if (i === 16) { - // Set the UUID variant to "10" in the 17th position - uuid += (randomNumber & 0x3) | 0x8; - } - else { - // Add a random hexadecimal digit to the UUID string - uuid += randomNumber.toString(16); - } - // Add hyphens to the UUID string at the appropriate positions - if (i === 7 || i === 11 || i === 15 || i === 19) { - uuid += "-"; - } - } - return uuid; +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -var _a; -// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. -let uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? 
void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" - ? globalThis.crypto.randomUUID.bind(globalThis.crypto) - : crypto.randomUUID; -// Not defined in earlier versions of Node.js 14 -if (!uuidFunction) { - uuidFunction = generateUUID; +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; } -/** - * Generated Universally Unique Identifier - * - * @returns RFC4122 v4 UUID. - */ -function randomUUID() { - return uuidFunction(); + + +/***/ }), + +/***/ 17974: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +//parse Empty Node as self closing node +const buildFromOrderedJs = __nccwpck_require__(17945); +const getIgnoreAttributesFn = __nccwpck_require__(81133) + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes === true || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } } -exports.computeSha256Hash = computeSha256Hash; -exports.computeSha256Hmac = computeSha256Hmac; -exports.createAbortablePromise = createAbortablePromise; -exports.delay = delay; -exports.getErrorMessage = getErrorMessage; -exports.getRandomIntegerInclusive = getRandomIntegerInclusive; -exports.isDefined = isDefined; -exports.isError = isError; -exports.isNode = isNode; -exports.isObject = isObject; -exports.isObjectWithProperties = isObjectWithProperties; -exports.objectHasProperty = objectHasProperty; -exports.randomUUID = randomUUID; -//# sourceMappingURL=index.js.map +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0, []).val; + } +}; +Builder.prototype.j2x = function(jObj, level, ajPath) { + let attrStr = ''; + let val = ''; + const jPath = ajPath.join('.') + for (let key in jObj) { + 
if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr && !this.ignoreAttributesFn(attr, jPath)) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + } else if (!attr) { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1, ajPath.concat(key)); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level, ajPath) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level, ajPath) + } + } + } + return {attrStr: attrStr, val: val}; +}; -/***/ }), +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} -/***/ 82107: -/***/ ((module) => { - -/****************************************************************************** -Copyright (c) Microsoft Corporation. 
- -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, Symbol, Reflect, Promise, SuppressedError */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __esDecorate; -var __runInitializers; -var __propKey; -var __setFunctionName; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __classPrivateFieldIn; -var __createBinding; -var __addDisposableResource; -var __disposeResources; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? 
target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { - function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } - var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; - var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; - var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); - var _, done = false; - for (var i = decorators.length - 1; i >= 0; i--) { - var context = {}; - for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context.access[p] = contextIn.access[p]; - context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); - if (kind === "accessor") { - if (result === void 0) continue; - if (result === null || typeof result !== "object") throw new TypeError("Object expected"); - if (_ = accept(result.get)) descriptor.get = _; - if (_ = accept(result.set)) descriptor.set = _; - if (_ = accept(result.init)) initializers.unshift(_); - } - else if (_ = accept(result)) { - if (kind === "field") initializers.unshift(_); - else descriptor[key] = _; - } - } - if (target) Object.defineProperty(target, contextIn.name, descriptor); - done = true; - }; - - __runInitializers = function (thisArg, initializers, value) { - var useValue = arguments.length > 2; - for (var i = 0; i < initializers.length; i++) { - value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); - } - return useValue ? value : void 0; - }; - - __propKey = function (x) { - return typeof x === "symbol" ? x : "".concat(x); - }; - - __setFunctionName = function (f, name, prefix) { - if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; - return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? (this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - __classPrivateFieldIn = function (state, receiver) { - if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); - return typeof state === "function" ? receiver === state : state.has(receiver); - }; - - __addDisposableResource = function (env, value, async) { - if (value !== null && value !== void 0) { - if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); - var dispose; - if (async) { - if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); - dispose = value[Symbol.asyncDispose]; - } - if (dispose === void 0) { - if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); - dispose = value[Symbol.dispose]; - } - if (typeof dispose !== "function") throw new TypeError("Object not disposable."); - env.stack.push({ value: value, dispose: dispose, async: async }); - } - else if (async) { - env.stack.push({ async: true }); - } - return value; - }; - - var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { - var e = new Error(message); - return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; - }; - - __disposeResources = function (env) { - function fail(e) { - env.error = env.hasError ? 
new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; - env.hasError = true; - } - function next() { - while (env.stack.length) { - var rec = env.stack.pop(); - try { - var result = rec.dispose && rec.dispose.call(rec.value); - if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); - } - catch (e) { - fail(e); - } - } - if (env.hasError) throw env.error; - } - return next(); - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__esDecorate", __esDecorate); - exporter("__runInitializers", __runInitializers); - exporter("__propKey", __propKey); - exporter("__setFunctionName", __setFunctionName); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); - exporter("__classPrivateFieldIn", __classPrivateFieldIn); - exporter("__addDisposableResource", __addDisposableResource); - exporter("__disposeResources", __disposeResources); -}); - - -/***/ }), - -/***/ 43415: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +function processTextOrObjNode (object, key, level, ajPath) { + const result = this.j2x(object, level + 1, ajPath.concat(key)); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); + } +} +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } } -})); - -var _v = _interopRequireDefault(__nccwpck_require__(14757)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(19982)); +} -var _v3 = _interopRequireDefault(__nccwpck_require__(85393)); +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i { +/***/ 17945: +/***/ ((module) => { -"use strict"; +const EOL = "\n"; +/** + * + * @param {array} jArray + * @param {any} options + * @returns + */ +function toXml(jArray, options) { + let indentation = ""; + if (options.format && options.indentBy.length > 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } - return _crypto.default.createHash('md5').update(bytes).digest(); + return xmlStr; } -var _default = md5; -exports["default"] = _default; - -/***/ }), +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} -/***/ 657: -/***/ ((__unused_webpack_module, exports) => { +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} -"use strict"; +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; /***/ }), -/***/ 67079: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - +/***/ 86888: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +const util = __nccwpck_require__(96760); -var _validate = _interopRequireDefault(__nccwpck_require__(47724)); +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } +function readEntityExp(xmlData,i){ + //External entities are not supported + // - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + //Parameter entities are not supported + // - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); } -var _default = parse; -exports["default"] = _default; +module.exports = readDocType; + /***/ }), -/***/ 90690: +/***/ 75468: /***/ ((__unused_webpack_module, exports) => { -"use strict"; +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; /***/ }), -/***/ 10979: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 59807: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use 
strict"; +///@ts-check -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate +const util = __nccwpck_require__(96760); +const xmlNode = __nccwpck_require__(80116); +const readDocType = __nccwpck_require__(86888); +const toNumber = __nccwpck_require__(14526); +const getIgnoreAttributesFn = __nccwpck_require__(81133) -let poolPtr = rnds8Pool.length; +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); - poolPtr = 0; +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) } - return rnds8Pool.slice(poolPtr, poolPtr += 16); } -/***/ }), - -/***/ 36631: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } } - - return _crypto.default.createHash('sha1').update(bytes).digest(); } -var _default = sha1; -exports["default"] = _default; - -/***/ }), - -/***/ 74794: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(47724)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - /** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities */ -const byteToHex = []; - -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); -} - -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. 
If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields - - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } } - - return uuid; } -var _default = stringify; -exports["default"] = _default; - -/***/ }), - -/***/ 14757: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(10979)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(74794)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; - -let _clockseq; // Previous uuid creation time - - -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 - - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? '/' : ''; + if (tags[0] === 'xmlns') { + return ''; } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + if (tags.length === 2) { + tagname = prefix + tags[1]; } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - let msecs = options.msecs !== undefined ? 
options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - - - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested - - - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); } + return tagname; +} - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); - b[i++] = clockseq & 0xff; // `node` +function buildAttributesMap(attrStr, jPath, tagName) { + if (this.options.ignoreAttributes !== true && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + if (this.ignoreAttributesFn(attrName, jPath)) { + continue + } + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs } - - return buf || (0, _stringify.default)(b); } -var _default = v1; 
-exports["default"] = _default; +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); -/***/ }), + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } -/***/ 19982: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } -"use strict"; + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { -var _v = _interopRequireDefault(__nccwpck_require__(44085)); + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); -var _md = _interopRequireDefault(__nccwpck_require__(64153)); + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; + } -/***/ }), -/***/ 44085: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); -"use strict"; + textData = this.saveTextToParentTag(textData, currentNode, jPath); + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; + textData = this.saveTextToParentTag(textData, currentNode, jPath); -var _stringify = _interopRequireDefault(__nccwpck_require__(74794)); + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; -var _parse = _interopRequireDefault(__nccwpck_require__(67079)); + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } - const bytes = []; + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } } + return xmlObj.child; +} - return bytes; +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } } -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; +const replaceEntitiesValue = function(val){ -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = 
val.replace( entity.regex, entity.val); } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); } - - return buf; } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." 
+ currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } } -/***/ }), +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} -/***/ 85393: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } -"use strict"; + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = 
readTagExp(xmlData, i, '>') -var _rng = _interopRequireDefault(__nccwpck_require__(10979)); + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} -var _stringify = _interopRequireDefault(__nccwpck_require__(74794)); +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -function v4(options, buf, offset) { - options = options || {}; +module.exports = OrderedObjParser; - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` +/***/ }), - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided +/***/ 86873: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (buf) { - offset = offset || 0; +const { buildOptions} = __nccwpck_require__(75468); +const OrderedObjParser = __nccwpck_require__(59807); +const { prettify} = __nccwpck_require__(17365); +const validator = __nccwpck_require__(11826); - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); } - return buf; - } - - return (0, _stringify.default)(rnds); + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } } -var _default = v4; -exports["default"] = _default; +module.exports = XMLParser; /***/ }), -/***/ 48788: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 17365: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(44085)); - -var _sha = _interopRequireDefault(__nccwpck_require__(36631)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; -/***/ }), + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); -/***/ 47724: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } -"use strict"; + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} -var _regex = _interopRequireDefault(__nccwpck_require__(90690)); +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); + return false; } +exports.prettify = prettify; -var _default = validate; -exports["default"] = _default; /***/ }), -/***/ 78947: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 80116: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(47724)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; - return parseInt(uuid.substr(14, 1), 16); -} -var _default = version; -exports["default"] = _default; +module.exports = XmlNode; /***/ }), -/***/ 27094: +/***/ 82076: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -52970,17744 +50913,16875 @@ exports["default"] = _default; Object.defineProperty(exports, "__esModule", ({ value: true })); -var logger$1 = __nccwpck_require__(3233); +var crypto = __nccwpck_require__(6113); +var logger$5 = __nccwpck_require__(3233); +var uuid$3 = __nccwpck_require__(50378); +var tslib = __nccwpck_require__(4351); +var stableStringify = __nccwpck_require__(30969); +var PriorityQueue = __nccwpck_require__(77986); +var semaphore = __nccwpck_require__(33165); +var coreRestPipeline = __nccwpck_require__(88121); +var nodeAbortController = __nccwpck_require__(85220); +var universalUserAgent = __nccwpck_require__(45030); +var JSBI = __nccwpck_require__(51778); var abortController = __nccwpck_require__(52557); -var coreUtil = __nccwpck_require__(9354); -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - * @internal - */ -const logger = logger$1.createClientLogger("core-lro"); +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? 
e : { 'default': e }; } + +var stableStringify__default = /*#__PURE__*/_interopDefaultLegacy(stableStringify); +var PriorityQueue__default = /*#__PURE__*/_interopDefaultLegacy(PriorityQueue); +var semaphore__default = /*#__PURE__*/_interopDefaultLegacy(semaphore); +var JSBI__default = /*#__PURE__*/_interopDefaultLegacy(JSBI); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -/** - * The default time interval to wait before sending the next polling request. - */ -const POLL_INTERVAL_IN_MS = 2000; -/** - * The closed set of terminal states. - */ -const terminalStates = ["succeeded", "canceled", "failed"]; +const DEFAULT_PARTITION_KEY_PATH = "/_partitionKey"; // eslint-disable-line @typescript-eslint/prefer-as-const // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Deserializes the state - */ -function deserializeState(serializedState) { - try { - return JSON.parse(serializedState).state; - } - catch (e) { - throw new Error(`Unable to deserialize input state: ${serializedState}`); - } -} -function setStateError(inputs) { - const { state, stateProxy, isOperationError } = inputs; - return (error) => { - if (isOperationError(error)) { - stateProxy.setError(state, error); - stateProxy.setFailed(state); - } - throw error; - }; -} -function appendReadableErrorMessage(currentMessage, innerMessage) { - let message = currentMessage; - if (message.slice(-1) !== ".") { - message = message + "."; - } - return message + " " + innerMessage; -} -function simplifyError(err) { - let message = err.message; - let code = err.code; - let curErr = err; - while (curErr.innererror) { - curErr = curErr.innererror; - code = curErr.code; - message = appendReadableErrorMessage(message, curErr.message); - } - return { - code, - message, - }; -} -function processOperationStatus(result) { - const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; - switch (status) { - case "succeeded": { - stateProxy.setSucceeded(state); - break; - } - case "failed": { - const err = getError === null || getError === void 0 ? void 0 : getError(response); - let postfix = ""; - if (err) { - const { code, message } = simplifyError(err); - postfix = `. ${code}. ${message}`; - } - const errStr = `The long-running operation has failed${postfix}`; - stateProxy.setError(state, new Error(errStr)); - stateProxy.setFailed(state); - logger.warning(errStr); - break; - } - case "canceled": { - stateProxy.setCanceled(state); - break; - } - } - if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || - (isDone === undefined && - ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { - stateProxy.setResult(state, buildResult({ - response, - state, - processResult, - })); - } -} -function buildResult(inputs) { - const { processResult, response, state } = inputs; - return processResult ? processResult(response, state) : response; -} -/** - * Initiates the long-running operation. + * @hidden */ -async function initOperation(inputs) { - const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; - const { operationLocation, resourceLocation, metadata, response } = await init(); - if (operationLocation) - withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); - const config = { - metadata, - operationLocation, - resourceLocation, - }; - logger.verbose(`LRO: Operation description:`, config); - const state = stateProxy.initState(config); - const status = getOperationStatus({ response, state, operationLocation }); - processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); - return state; +const Constants$1 = { + HttpHeaders: { + Authorization: "authorization", + ETag: "etag", + MethodOverride: "X-HTTP-Method", + Slug: "Slug", + ContentType: "Content-Type", + LastModified: "Last-Modified", + ContentEncoding: "Content-Encoding", + CharacterSet: "CharacterSet", + UserAgent: "User-Agent", + IfModifiedSince: "If-Modified-Since", + IfMatch: "If-Match", + IfNoneMatch: "If-None-Match", + ContentLength: "Content-Length", + AcceptEncoding: "Accept-Encoding", + KeepAlive: "Keep-Alive", + CacheControl: "Cache-Control", + TransferEncoding: "Transfer-Encoding", + ContentLanguage: "Content-Language", + ContentLocation: "Content-Location", + ContentMd5: "Content-Md5", + ContentRange: "Content-Range", + Accept: "Accept", + AcceptCharset: "Accept-Charset", + AcceptLanguage: "Accept-Language", + IfRange: "If-Range", + IfUnmodifiedSince: "If-Unmodified-Since", + MaxForwards: "Max-Forwards", + ProxyAuthorization: "Proxy-Authorization", + AcceptRanges: "Accept-Ranges", + ProxyAuthenticate: "Proxy-Authenticate", + RetryAfter: "Retry-After", + SetCookie: "Set-Cookie", + WwwAuthenticate: "Www-Authenticate", + Origin: "Origin", + Host: "Host", + AccessControlAllowOrigin: "Access-Control-Allow-Origin", + AccessControlAllowHeaders: "Access-Control-Allow-Headers", + KeyValueEncodingFormat: "application/x-www-form-urlencoded", + WrapAssertionFormat: "wrap_assertion_format", + WrapAssertion: "wrap_assertion", + WrapScope: "wrap_scope", + SimpleToken: "SWT", + HttpDate: "date", + Prefer: "Prefer", + Location: "Location", + Referer: "referer", + A_IM: "A-IM", + // Query + Query: "x-ms-documentdb-query", + IsQuery: "x-ms-documentdb-isquery", + IsQueryPlan: "x-ms-cosmos-is-query-plan-request", + SupportedQueryFeatures: "x-ms-cosmos-supported-query-features", + QueryVersion: "x-ms-cosmos-query-version", + // Our custom Azure Cosmos DB headers + Continuation: "x-ms-continuation", + ContinuationToken: "x-ms-continuation-token", + PageSize: "x-ms-max-item-count", + ItemCount: "x-ms-item-count", + // Request sender generated. Simply echoed by backend. + ActivityId: "x-ms-activity-id", + PreTriggerInclude: "x-ms-documentdb-pre-trigger-include", + PreTriggerExclude: "x-ms-documentdb-pre-trigger-exclude", + PostTriggerInclude: "x-ms-documentdb-post-trigger-include", + PostTriggerExclude: "x-ms-documentdb-post-trigger-exclude", + IndexingDirective: "x-ms-indexing-directive", + SessionToken: "x-ms-session-token", + ConsistencyLevel: "x-ms-consistency-level", + XDate: "x-ms-date", + CollectionPartitionInfo: "x-ms-collection-partition-info", + CollectionServiceInfo: "x-ms-collection-service-info", + // Deprecated, use RetryAfterInMs instead. 
[dist/index.js (generated bundle) — hunk abridged: the rebuild vendors @azure/cosmos SDK internals (HTTP header and path constants, ResourceType/HTTPMethod/OperationType enums, permission and SAS-token scope values, status/substatus codes, resource URI factories, connection-string parsing, and master-key request signing) and removes the previously bundled @azure/core-lro poller implementation. This output is machine-generated and not hand-edited.]
- // However, it can be the case that TResult is instantiated to void, so - // we are not expecting a result anyway. To assert that we might not - // have a result eventually after finishing polling, we cast the result - // to TResult. - this.resolve(this.getResult()); - } - } + ConsistencyLevel["ConsistentPrefix"] = "ConsistentPrefix"; +})(exports.ConsistencyLevel || (exports.ConsistencyLevel = {})); + +// Copyright (c) Microsoft Corporation. +/** + * Represents a DatabaseAccount in the Azure Cosmos DB database service. + */ +class DatabaseAccount { /** - * Returns a promise that will resolve once the underlying operation is completed. + * The self-link for Databases in the databaseAccount. + * @deprecated Use `databasesLink` */ - async pollUntilDone(pollOptions = {}) { - if (this.stopped) { - this.startPolling(pollOptions).catch(this.reject); - } - // This is needed because the state could have been updated by - // `cancelOperation`, e.g. the operation is canceled or an error occurred. - this.processUpdatedState(); - return this.promise; + get DatabasesLink() { + return this.databasesLink; } /** - * Invokes the provided callback after each polling is completed, - * sending the current state of the poller's operation. - * - * It returns a method that can be used to stop receiving updates on the given callback function. + * The self-link for Media in the databaseAccount. + * @deprecated Use `mediaLink` */ - onProgress(callback) { - this.pollProgressCallbacks.push(callback); - return () => { - this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); - }; + get MediaLink() { + return this.mediaLink; } /** - * Returns true if the poller has finished polling. + * Attachment content (media) storage quota in MBs ( Retrieved from gateway ). + * @deprecated use `maxMediaStorageUsageInMB` */ - isDone() { - const state = this.operation.state; - return Boolean(state.isCompleted || state.isCancelled || state.error); + get MaxMediaStorageUsageInMB() { + return this.maxMediaStorageUsageInMB; } /** - * Stops the poller from continuing to poll. + * Current attachment content (media) usage in MBs (Retrieved from gateway ) + * + * Value is returned from cached information updated periodically and is not guaranteed + * to be real time. + * + * @deprecated use `currentMediaStorageUsageInMB` */ - stopPolling() { - if (!this.stopped) { - this.stopped = true; - if (this.reject) { - this.reject(new PollerStoppedError("This poller is already stopped")); - } - } + get CurrentMediaStorageUsageInMB() { + return this.currentMediaStorageUsageInMB; } /** - * Returns true if the poller is stopped. + * Gets the UserConsistencyPolicy settings. + * @deprecated use `consistencyPolicy` */ - isStopped() { - return this.stopped; + get ConsistencyPolicy() { + return this.consistencyPolicy; } - /** - * Attempts to cancel the underlying operation. - * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. - * - * If it's called again before it finishes, it will throw an error. - * - * @param options - Optional properties passed to the operation's update method. - */ - cancelOperation(options = {}) { - if (!this.cancelPromise) { - this.cancelPromise = this.cancelOnce(options); + // TODO: body - any + constructor(body, headers) { + /** The list of writable locations for a geo-replicated database account. */ + this.writableLocations = []; + /** The list of readable locations for a geo-replicated database account. 
*/ + this.readableLocations = []; + this.databasesLink = "/dbs/"; + this.mediaLink = "/media/"; + this.maxMediaStorageUsageInMB = headers[Constants$1.HttpHeaders.MaxMediaStorageUsageInMB]; + this.currentMediaStorageUsageInMB = headers[Constants$1.HttpHeaders.CurrentMediaStorageUsageInMB]; + this.consistencyPolicy = body.userConsistencyPolicy + ? body.userConsistencyPolicy.defaultConsistencyLevel + : exports.ConsistencyLevel.Session; + if (body[Constants$1.WritableLocations] && body.id !== "localhost") { + this.writableLocations = body[Constants$1.WritableLocations]; } - else if (options.abortSignal) { - throw new Error("A cancel request is currently pending"); + if (body[Constants$1.ReadableLocations] && body.id !== "localhost") { + this.readableLocations = body[Constants$1.ReadableLocations]; + } + if (body[Constants$1.ENABLE_MULTIPLE_WRITABLE_LOCATIONS]) { + this.enableMultipleWritableLocations = + body[Constants$1.ENABLE_MULTIPLE_WRITABLE_LOCATIONS] === true || + body[Constants$1.ENABLE_MULTIPLE_WRITABLE_LOCATIONS] === "true"; } - return this.cancelPromise; } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** Defines a target data type of an index path specification in the Azure Cosmos DB service. */ +exports.DataType = void 0; +(function (DataType) { + /** Represents a numeric data type. */ + DataType["Number"] = "Number"; + /** Represents a string data type. */ + DataType["String"] = "String"; + /** Represents a point data type. */ + DataType["Point"] = "Point"; + /** Represents a line string data type. */ + DataType["LineString"] = "LineString"; + /** Represents a polygon data type. */ + DataType["Polygon"] = "Polygon"; + /** Represents a multi-polygon data type. */ + DataType["MultiPolygon"] = "MultiPolygon"; +})(exports.DataType || (exports.DataType = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Specifies the supported indexing modes. + */ +exports.IndexingMode = void 0; +(function (IndexingMode) { /** - * Returns the state of the operation. - * - * Even though TState will be the same type inside any of the methods of any extension of the Poller class, - * implementations of the pollers can customize what's shared with the public by writing their own - * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller - * and a public type representing a safe to share subset of the properties of the internal state. - * Their definition of getOperationState can then return their public type. - * - * Example: - * - * ```ts - * // Let's say we have our poller's operation state defined as: - * interface MyOperationState extends PollOperationState { - * privateProperty?: string; - * publicProperty?: string; - * } - * - * // To allow us to have a true separation of public and private state, we have to define another interface: - * interface PublicState extends PollOperationState { - * publicProperty?: string; - * } - * - * // Then, we define our Poller as follows: - * export class MyPoller extends Poller { - * // ... More content is needed here ... - * - * public getOperationState(): PublicState { - * const state: PublicState = this.operation.state; - * return { - * // Properties from PollOperationState - * isStarted: state.isStarted, - * isCompleted: state.isCompleted, - * isCancelled: state.isCancelled, - * error: state.error, - * result: state.result, - * - * // The only other property needed by PublicState. 
- * publicProperty: state.publicProperty - * } - * } - * } - * ``` + * Index is updated synchronously with a create or update operation. * - * You can see this in the tests of this repository, go to the file: - * `../test/utils/testPoller.ts` - * and look for the getOperationState implementation. - */ - getOperationState() { - return this.operation.state; - } - /** - * Returns the result value of the operation, - * regardless of the state of the poller. - * It can return undefined or an incomplete form of the final TResult value - * depending on the implementation. + * With consistent indexing, query behavior is the same as the default consistency level for the container. + * The index is always kept up to date with the data. */ - getResult() { - const state = this.operation.state; - return state.result; - } + IndexingMode["consistent"] = "consistent"; /** - * Returns a serialized version of the poller's operation - * by invoking the operation's toString method. + * Index is updated asynchronously with respect to a create or update operation. + * + * With lazy indexing, queries are eventually consistent. The index is updated when the container is idle. */ - toString() { - return this.operation.toString(); - } -} + IndexingMode["lazy"] = "lazy"; + /** No Index is provided. */ + IndexingMode["none"] = "none"; +})(exports.IndexingMode || (exports.IndexingMode = {})); + +/* The target data type of a spatial path */ +exports.SpatialType = void 0; +(function (SpatialType) { + SpatialType["LineString"] = "LineString"; + SpatialType["MultiPolygon"] = "MultiPolygon"; + SpatialType["Point"] = "Point"; + SpatialType["Polygon"] = "Polygon"; +})(exports.SpatialType || (exports.SpatialType = {})); // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * The LRO Engine, a class that performs polling. + * Specifies the supported Index types. */ -class LroEngine extends Poller { - constructor(lro, options) { - const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; - const state = resumeFrom - ? deserializeState(resumeFrom) - : {}; - const operation = new GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); - super(operation); - this.resolveOnUnsuccessful = resolveOnUnsuccessful; - this.config = { intervalInMs: intervalInMs }; - operation.setPollerConfig(this.config); - } +exports.IndexKind = void 0; +(function (IndexKind) { /** - * The method used by the poller to wait before attempting to update its operation. + * This is supplied for a path which requires sorting. */ - delay() { - return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); - } -} - -exports.LroEngine = LroEngine; -exports.Poller = Poller; -exports.PollerCancelledError = PollerCancelledError; -exports.PollerStoppedError = PollerStoppedError; -exports.createHttpPoller = createHttpPoller; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9354: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var abortController = __nccwpck_require__(52557); -var crypto = __nccwpck_require__(6113); + IndexKind["Range"] = "Range"; + /** + * This is supplied for a path which requires geospatial indexing. 
+ */ + IndexKind["Spatial"] = "Spatial"; +})(exports.IndexKind || (exports.IndexKind = {})); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -var _a$1; /** - * A constant that indicates whether the environment the code is running is Node.JS. + * @hidden + * None PartitionKey Literal + */ +const NonePartitionKeyLiteral = {}; +/** + * @hidden + * Null PartitionKey Literal */ -const isNode = typeof process !== "undefined" && Boolean(process.version) && Boolean((_a$1 = process.versions) === null || _a$1 === void 0 ? void 0 : _a$1.node); +const NullPartitionKeyLiteral = null; +/** + * @hidden + * Maps PartitionKey to InternalPartitionKey. + * @param partitionKey - PartitonKey to be converted. + * @returns PartitionKeyInternal + */ +function convertToInternalPartitionKey(partitionKey) { + if (Array.isArray(partitionKey)) { + return partitionKey.map((key) => (key === undefined ? NonePartitionKeyLiteral : key)); + } + else + return [partitionKey]; +} // Copyright (c) Microsoft Corporation. /** - * Creates an abortable promise. - * @param buildPromise - A function that takes the resolve and reject functions as parameters. - * @param options - The options for the abortable promise. - * @returns A promise that can be aborted. + * Builder class for building PartitionKey. */ -function createAbortablePromise(buildPromise, options) { - const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; - return new Promise((resolve, reject) => { - function rejectOnAbort() { - reject(new abortController.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); - } - function removeListeners() { - abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.removeEventListener("abort", onAbort); - } - function onAbort() { - cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); - removeListeners(); - rejectOnAbort(); - } - if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { - return rejectOnAbort(); - } - try { - buildPromise((x) => { - removeListeners(); - resolve(x); - }, (x) => { - removeListeners(); - reject(x); - }); - } - catch (err) { - reject(err); - } - abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); - }); +class PartitionKeyBuilder { + constructor() { + this.values = []; + } + addValue(value) { + this.values.push(value); + return this; + } + addNullValue() { + this.values.push(NullPartitionKeyLiteral); + return this; + } + addNoneValue() { + this.values.push(NonePartitionKeyLiteral); + return this; + } + build() { + return [...this.values]; + } } // Copyright (c) Microsoft Corporation. -const StandardAbortMessage = "The delay was aborted."; +// Licensed under the MIT license. /** - * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. - * @param timeInMs - The number of milliseconds to be delayed. - * @param options - The options for delay - currently abort options - * @returns Promise that is resolved after timeInMs + * PartitionKey Definition Version */ -function delay(timeInMs, options) { - let token; - const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? 
options : {}; - return createAbortablePromise((resolve) => { - token = setTimeout(resolve, timeInMs); - }, { - cleanupBeforeAbort: () => clearTimeout(token), - abortSignal, - abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage, - }); -} +exports.PartitionKeyDefinitionVersion = void 0; +(function (PartitionKeyDefinitionVersion) { + PartitionKeyDefinitionVersion[PartitionKeyDefinitionVersion["V1"] = 1] = "V1"; + PartitionKeyDefinitionVersion[PartitionKeyDefinitionVersion["V2"] = 2] = "V2"; +})(exports.PartitionKeyDefinitionVersion || (exports.PartitionKeyDefinitionVersion = {})); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Returns a random integer value between a lower and upper bound, - * inclusive of both bounds. - * Note that this uses Math.random and isn't secure. If you need to use - * this for any kind of security purpose, find a better source of random. - * @param min - The smallest integer value allowed. - * @param max - The largest integer value allowed. + * Type of PartitionKey i.e. Hash, MultiHash */ -function getRandomIntegerInclusive(min, max) { - // Make sure inputs are integers. - min = Math.ceil(min); - max = Math.floor(max); - // Pick a random offset from zero to the size of the range. - // Since Math.random() can never return 1, we have to make the range one larger - // in order to be inclusive of the maximum value after we take the floor. - const offset = Math.floor(Math.random() * (max - min + 1)); - return offset + min; -} +exports.PartitionKeyKind = void 0; +(function (PartitionKeyKind) { + PartitionKeyKind["Hash"] = "Hash"; + PartitionKeyKind["MultiHash"] = "MultiHash"; +})(exports.PartitionKeyKind || (exports.PartitionKeyKind = {})); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Helper to determine when an input is a generic JS object. - * @returns true when input is an object type that is not null, Array, RegExp, or Date. + * Enum for permission mode values. */ -function isObject(input) { - return (typeof input === "object" && - input !== null && - !Array.isArray(input) && - !(input instanceof RegExp) && - !(input instanceof Date)); -} +exports.PermissionMode = void 0; +(function (PermissionMode) { + /** Permission not valid. */ + PermissionMode["None"] = "none"; + /** Permission applicable for read operations only. */ + PermissionMode["Read"] = "read"; + /** Permission applicable for all operations. */ + PermissionMode["All"] = "all"; +})(exports.PermissionMode || (exports.PermissionMode = {})); // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Typeguard for an error object shape (has name and message) - * @param e - Something caught by a catch clause. + * Represents Priority Level associated with each Azure Cosmos DB client requests.
+ * The Low priority requests are always throttled before any High priority requests. + * + * By default all requests are considered as High priority requests. + * + * See https://aka.ms/CosmosDB/PriorityBasedExecution for more detailed documentation on Priority based throttling. */ -function isError(e) { - if (isObject(e)) { - const hasName = typeof e.name === "string"; - const hasMessage = typeof e.message === "string"; - return hasName && hasMessage; +exports.PriorityLevel = void 0; +(function (PriorityLevel) { + /** + * High Priority requests are throttled after Low priority requests. + */ + PriorityLevel["High"] = "High"; + /** + * Low Priority requests are throttled before High priority requests. + */ + PriorityLevel["Low"] = "Low"; +})(exports.PriorityLevel || (exports.PriorityLevel = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Enum for trigger operation values. + * specifies the operations on which a trigger should be executed. + */ +exports.TriggerOperation = void 0; +(function (TriggerOperation) { + /** All operations. */ + TriggerOperation["All"] = "all"; + /** Create operations only. */ + TriggerOperation["Create"] = "create"; + /** Update operations only. */ + TriggerOperation["Update"] = "update"; + /** Delete operations only. */ + TriggerOperation["Delete"] = "delete"; + /** Replace operations only. */ + TriggerOperation["Replace"] = "replace"; +})(exports.TriggerOperation || (exports.TriggerOperation = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Enum for trigger type values. + * Specifies the type of the trigger. + */ +exports.TriggerType = void 0; +(function (TriggerType) { + /** Trigger should be executed before the associated operation(s). */ + TriggerType["Pre"] = "pre"; + /** Trigger should be executed after the associated operation(s). */ + TriggerType["Post"] = "post"; +})(exports.TriggerType || (exports.TriggerType = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Enum for udf type values. + * Specifies the types of user defined functions. + */ +exports.UserDefinedFunctionType = void 0; +(function (UserDefinedFunctionType) { + /** The User Defined Function is written in JavaScript. This is currently the only option. */ + UserDefinedFunctionType["Javascript"] = "Javascript"; +})(exports.UserDefinedFunctionType || (exports.UserDefinedFunctionType = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +exports.GeospatialType = void 0; +(function (GeospatialType) { + /** Represents data in round-earth coordinate system. */ + GeospatialType["Geography"] = "Geography"; + /** Represents data in Eucledian(flat) coordinate system. */ + GeospatialType["Geometry"] = "Geometry"; +})(exports.GeospatialType || (exports.GeospatialType = {})); + +// Copyright (c) Microsoft Corporation. +const logger$4 = logger$5.createClientLogger("extractPartitionKey"); +/** + * Function to extract PartitionKey based on {@link PartitionKeyDefinition} + * from an object. + * Retuns + * 1. PartitionKeyInternal[] if extraction is successful. + * 2. undefined if either {@link partitionKeyDefinition} is not well formed + * or an unsupported partitionkey type is encountered. 
+ * @hidden + */ +function extractPartitionKeys(document, partitionKeyDefinition) { + if (partitionKeyDefinition && + partitionKeyDefinition.paths && + partitionKeyDefinition.paths.length > 0) { + if (partitionKeyDefinition.systemKey === true) { + return []; + } + if (partitionKeyDefinition.paths.length === 1 && + partitionKeyDefinition.paths[0] === DEFAULT_PARTITION_KEY_PATH) { + return [extractPartitionKey(DEFAULT_PARTITION_KEY_PATH, document)]; + } + const partitionKeys = []; + partitionKeyDefinition.paths.forEach((path) => { + const obj = extractPartitionKey(path, document); + if (obj === undefined) { + logger$4.warning("Unsupported PartitionKey found."); + return undefined; + } + partitionKeys.push(obj); + }); + return partitionKeys; } - return false; + logger$4.error("Unexpected Partition Key Definition Found."); + return undefined; +} +function extractPartitionKey(path, obj) { + const pathParts = parsePath(path); + for (const part of pathParts) { + if (typeof obj === "object" && obj !== null && part in obj) { + obj = obj[part]; + } + else { + obj = undefined; + break; + } + } + if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") { + return obj; + } + else if (obj === NullPartitionKeyLiteral) { + return NullPartitionKeyLiteral; + } + else if (obj === undefined || JSON.stringify(obj) === JSON.stringify(NonePartitionKeyLiteral)) { + return NonePartitionKeyLiteral; + } + return undefined; } /** - * Given what is thought to be an error object, return the message if possible. - * If the message is missing, returns a stringified version of the input. - * @param e - Something thrown from a try block - * @returns The error message or a string of the input + * @hidden */ -function getErrorMessage(e) { - if (isError(e)) { - return e.message; +function undefinedPartitionKey(partitionKeyDefinition) { + if (partitionKeyDefinition.systemKey === true) { + return []; } else { - let stringified; - try { - if (typeof e === "object" && e) { - stringified = JSON.stringify(e); - } - else { - stringified = String(e); - } - } - catch (err) { - stringified = "[unable to stringify input]"; - } - return `Unknown error ${stringified}`; + return partitionKeyDefinition.paths.map(() => NonePartitionKeyLiteral); } } // Copyright (c) Microsoft Corporation. /** - * Generates a SHA-256 HMAC signature. - * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. - * @param stringToSign - The data to be signed. - * @param encoding - The textual encoding to use for the returned HMAC digest. + * Utility function to avoid writing boilder plate code while checking for + * undefined values. It throws Error if the input value is undefined. + * @param value - Value which is potentially undefined. + * @param msg - Error Message to throw if value is undefined. + * @returns */ -async function computeSha256Hmac(key, stringToSign, encoding) { - const decodedKey = Buffer.from(key, "base64"); - return crypto.createHmac("sha256", decodedKey).update(stringToSign).digest(encoding); +function assertNotUndefined(value, msg) { + if (value !== undefined) { + return value; + } + throw new Error(msg || "Unexpected 'undefined' value encountered"); } /** - * Generates a SHA-256 hash. - * @param content - The data to be included in the hash. - * @param encoding - The textual encoding to use for the returned hash. + * Check for value being PrimitivePartitionKeyValue. 
+ * @internal */ -async function computeSha256Hash(content, encoding) { - return crypto.createHash("sha256").update(content).digest(encoding); +function isPrimitivePartitionKeyValue(value) { + return (isWellDefinedPartitionKeyValue(value) || + isNonePartitionKeyValue(value) || + isNullPartitionKeyValue(value)); } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * Helper TypeGuard that checks if something is defined or not. - * @param thing - Anything + * Check for value being string, number or boolean. + * @internal */ -function isDefined(thing) { - return typeof thing !== "undefined" && thing !== null; +function isWellDefinedPartitionKeyValue(value) { + return typeof value === "string" || typeof value === "boolean" || typeof value === "number"; } /** - * Helper TypeGuard that checks if the input is an object with the specified properties. - * @param thing - Anything. - * @param properties - The name of the properties that should appear in the object. + * Check for value being NonePartitionKeyType. + * @internal */ -function isObjectWithProperties(thing, properties) { - if (!isDefined(thing) || typeof thing !== "object") { - return false; - } - for (const property of properties) { - if (!objectHasProperty(thing, property)) { - return false; - } - } - return true; +function isNonePartitionKeyValue(value) { + return value !== undefined && JSON.stringify(value) === JSON.stringify(NonePartitionKeyLiteral); } /** - * Helper TypeGuard that checks if the input is an object with the specified property. - * @param thing - Any object. - * @param property - The name of the property that should appear in the object. + * Check for value being NullPartitionKeyType. + * @internal */ -function objectHasProperty(thing, property) { - return (isDefined(thing) && typeof thing === "object" && property in thing); +function isNullPartitionKeyValue(value) { + return value === NullPartitionKeyLiteral; +} +/** + * Verify validity of partition key. + * @internal + */ +function isPartitionKey(partitionKey) { + return isPrimitivePartitionKeyValue(partitionKey) || Array.isArray(partitionKey); } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/* - * NOTE: When moving this file, please update "react-native" section in package.json. +/** + * The \@azure/logger configuration for this package. */ +const defaultLogger = logger$5.createClientLogger("cosmosdb"); + +// Copyright (c) Microsoft Corporation. +// ---------------------------------------------------------------------------- +// Utility methods +// +/** @hidden */ +function javaScriptFriendlyJSONStringify(s) { + // two line terminators (Line separator and Paragraph separator) are not needed to be escaped in JSON + // but are needed to be escaped in JavaScript. + return JSON.stringify(s) + .replace(/\u2028/g, "\\u2028") + .replace(/\u2029/g, "\\u2029"); +} +/** @hidden */ +function bodyFromData(data) { + if (typeof data === "object") { + return javaScriptFriendlyJSONStringify(data); + } + return data; +} +const JsonContentType = "application/json"; /** - * Generated Universally Unique Identifier - * - * @returns RFC4122 v4 UUID. 
+ * @hidden */ -function generateUUID() { - let uuid = ""; - for (let i = 0; i < 32; i++) { - // Generate a random number between 0 and 15 - const randomNumber = Math.floor(Math.random() * 16); - // Set the UUID version to 4 in the 13th position - if (i === 12) { - uuid += "4"; +async function getHeaders({ clientOptions, defaultHeaders, verb, path, resourceId, resourceType, options = {}, partitionKeyRangeId, useMultipleWriteLocations, partitionKey, }) { + const headers = Object.assign({ [Constants$1.HttpHeaders.ResponseContinuationTokenLimitInKB]: 1, [Constants$1.HttpHeaders.EnableCrossPartitionQuery]: true }, defaultHeaders); + if (useMultipleWriteLocations) { + headers[Constants$1.HttpHeaders.ALLOW_MULTIPLE_WRITES] = true; + } + if (options.continuationTokenLimitInKB) { + headers[Constants$1.HttpHeaders.ResponseContinuationTokenLimitInKB] = + options.continuationTokenLimitInKB; + } + if (options.continuationToken) { + headers[Constants$1.HttpHeaders.Continuation] = options.continuationToken; + } + else if (options.continuation) { + headers[Constants$1.HttpHeaders.Continuation] = options.continuation; + } + if (options.preTriggerInclude) { + headers[Constants$1.HttpHeaders.PreTriggerInclude] = + options.preTriggerInclude.constructor === Array + ? options.preTriggerInclude.join(",") + : options.preTriggerInclude; + } + if (options.postTriggerInclude) { + headers[Constants$1.HttpHeaders.PostTriggerInclude] = + options.postTriggerInclude.constructor === Array + ? options.postTriggerInclude.join(",") + : options.postTriggerInclude; + } + if (options.offerType) { + headers[Constants$1.HttpHeaders.OfferType] = options.offerType; + } + if (options.offerThroughput) { + headers[Constants$1.HttpHeaders.OfferThroughput] = options.offerThroughput; + } + if (options.maxItemCount) { + headers[Constants$1.HttpHeaders.PageSize] = options.maxItemCount; + } + if (options.accessCondition) { + if (options.accessCondition.type === "IfMatch") { + headers[Constants$1.HttpHeaders.IfMatch] = options.accessCondition.condition; } - else if (i === 16) { - // Set the UUID variant to "10" in the 17th position - uuid += (randomNumber & 0x3) | 0x8; + else { + headers[Constants$1.HttpHeaders.IfNoneMatch] = options.accessCondition.condition; + } + } + if (options.useIncrementalFeed) { + headers[Constants$1.HttpHeaders.A_IM] = "Incremental Feed"; + } + if (options.indexingDirective) { + headers[Constants$1.HttpHeaders.IndexingDirective] = options.indexingDirective; + } + if (options.consistencyLevel) { + headers[Constants$1.HttpHeaders.ConsistencyLevel] = options.consistencyLevel; + } + if (options.priorityLevel) { + headers[Constants$1.HttpHeaders.PriorityLevel] = options.priorityLevel; + } + if (options.maxIntegratedCacheStalenessInMs && resourceType === exports.ResourceType.item) { + if (typeof options.maxIntegratedCacheStalenessInMs === "number") { + headers[Constants$1.HttpHeaders.DedicatedGatewayPerRequestCacheStaleness] = + options.maxIntegratedCacheStalenessInMs.toString(); } else { - // Add a random hexadecimal digit to the UUID string - uuid += randomNumber.toString(16); + defaultLogger.error(`RangeError: maxIntegratedCacheStalenessInMs "${options.maxIntegratedCacheStalenessInMs}" is not a valid parameter.`); + headers[Constants$1.HttpHeaders.DedicatedGatewayPerRequestCacheStaleness] = "null"; } - // Add hyphens to the UUID string at the appropriate positions - if (i === 7 || i === 11 || i === 15 || i === 19) { - uuid += "-"; + } + if (options.resourceTokenExpirySeconds) { + 
headers[Constants$1.HttpHeaders.ResourceTokenExpiry] = options.resourceTokenExpirySeconds; + } + if (options.sessionToken) { + headers[Constants$1.HttpHeaders.SessionToken] = options.sessionToken; + } + if (options.enableScanInQuery) { + headers[Constants$1.HttpHeaders.EnableScanInQuery] = options.enableScanInQuery; + } + if (options.populateQuotaInfo) { + headers[Constants$1.HttpHeaders.PopulateQuotaInfo] = options.populateQuotaInfo; + } + if (options.populateQueryMetrics) { + headers[Constants$1.HttpHeaders.PopulateQueryMetrics] = options.populateQueryMetrics; + } + if (options.maxDegreeOfParallelism !== undefined) { + headers[Constants$1.HttpHeaders.ParallelizeCrossPartitionQuery] = true; + } + if (options.populateQuotaInfo) { + headers[Constants$1.HttpHeaders.PopulateQuotaInfo] = true; + } + if (partitionKey !== undefined && !headers[Constants$1.HttpHeaders.PartitionKey]) { + headers[Constants$1.HttpHeaders.PartitionKey] = jsonStringifyAndEscapeNonASCII(partitionKey); + } + if (clientOptions.key || clientOptions.tokenProvider) { + headers[Constants$1.HttpHeaders.XDate] = new Date().toUTCString(); + } + if (verb === exports.HTTPMethod.post || verb === exports.HTTPMethod.put) { + if (!headers[Constants$1.HttpHeaders.ContentType]) { + headers[Constants$1.HttpHeaders.ContentType] = JsonContentType; } } - return uuid; + if (!headers[Constants$1.HttpHeaders.Accept]) { + headers[Constants$1.HttpHeaders.Accept] = JsonContentType; + } + if (partitionKeyRangeId !== undefined) { + headers[Constants$1.HttpHeaders.PartitionKeyRangeID] = partitionKeyRangeId; + } + if (options.enableScriptLogging) { + headers[Constants$1.HttpHeaders.EnableScriptLogging] = options.enableScriptLogging; + } + if (options.disableRUPerMinuteUsage) { + headers[Constants$1.HttpHeaders.DisableRUPerMinuteUsage] = true; + } + if (options.populateIndexMetrics) { + headers[Constants$1.HttpHeaders.PopulateIndexMetrics] = options.populateIndexMetrics; + } + if (clientOptions.key || + clientOptions.resourceTokens || + clientOptions.tokenProvider || + clientOptions.permissionFeed) { + await setAuthorizationHeader(clientOptions, verb, path, resourceId, resourceType, headers); + } + return headers; } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -var _a; -// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. -let uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" - ? globalThis.crypto.randomUUID.bind(globalThis.crypto) - : crypto.randomUUID; -// Not defined in earlier versions of Node.js 14 -if (!uuidFunction) { - uuidFunction = generateUUID; +const uuid$2 = uuid$3.v4; +function isKeyInRange(min, max, key) { + const isAfterMinInclusive = key.localeCompare(min) >= 0; + const isBeforeMax = key.localeCompare(max) < 0; + return isAfterMinInclusive && isBeforeMax; } +const BulkOperationType = { + Create: "Create", + Upsert: "Upsert", + Read: "Read", + Delete: "Delete", + Replace: "Replace", + Patch: "Patch", +}; /** - * Generated Universally Unique Identifier - * - * @returns RFC4122 v4 UUID. + * Maps OperationInput to Operation by + * - generating Ids if needed. + * - choosing partitionKey which can be used to choose which batch this + * operation should be part of. The order is - + * 1. If the operationInput itself has partitionKey field set it is used. + * 2. Other wise for create/replace/upsert it is extracted from resource body. + * 3. 
For read/delete/patch type operations undefined partitionKey is used. + * - Here one nuance is that, the partitionKey field inside Operation needs to + * be serialized as a JSON string. + * @param operationInput - OperationInput + * @param definition - PartitionKeyDefinition + * @param options - RequestOptions + * @returns */ -function randomUUID() { - return uuidFunction(); +function prepareOperations(operationInput, definition, options = {}) { + populateIdsIfNeeded(operationInput, options); + let partitionKey; + if (Object.prototype.hasOwnProperty.call(operationInput, "partitionKey")) { + if (operationInput.partitionKey === undefined) { + partitionKey = definition.paths.map(() => NonePartitionKeyLiteral); + } + else { + partitionKey = convertToInternalPartitionKey(operationInput.partitionKey); + } + } + else { + switch (operationInput.operationType) { + case BulkOperationType.Create: + case BulkOperationType.Replace: + case BulkOperationType.Upsert: + partitionKey = assertNotUndefined(extractPartitionKeys(operationInput.resourceBody, definition), "Unexpected undefined Partition Key Found."); + break; + case BulkOperationType.Read: + case BulkOperationType.Delete: + case BulkOperationType.Patch: + partitionKey = definition.paths.map(() => NonePartitionKeyLiteral); + } + } + return { + operation: Object.assign(Object.assign({}, operationInput), { partitionKey: JSON.stringify(partitionKey) }), + partitionKey, + }; +} +/** + * For operations requiring Id genrate random uuids. + * @param operationInput - OperationInput to be checked. + * @param options - RequestOptions + */ +function populateIdsIfNeeded(operationInput, options) { + if (operationInput.operationType === BulkOperationType.Create || + operationInput.operationType === BulkOperationType.Upsert) { + if ((operationInput.resourceBody.id === undefined || operationInput.resourceBody.id === "") && + !options.disableAutomaticIdGeneration) { + operationInput.resourceBody.id = uuid$2(); + } + } +} +/** + * Splits a batch into array of batches based on cumulative size of its operations by making sure + * cumulative size of an individual batch is not larger than {@link Constants.DefaultMaxBulkRequestBodySizeInBytes}. + * If a single operation itself is larger than {@link Constants.DefaultMaxBulkRequestBodySizeInBytes}, that + * operation would be moved into a batch containing only that operation. + * @param originalBatch - A batch of operations needed to be checked. + * @returns + * @hidden + */ +function splitBatchBasedOnBodySize(originalBatch) { + if ((originalBatch === null || originalBatch === void 0 ? 
void 0 : originalBatch.operations) === undefined || originalBatch.operations.length < 1) + return []; + let currentBatchSize = calculateObjectSizeInBytes(originalBatch.operations[0]); + let currentBatch = Object.assign(Object.assign({}, originalBatch), { operations: [originalBatch.operations[0]], indexes: [originalBatch.indexes[0]] }); + const processedBatches = []; + processedBatches.push(currentBatch); + for (let index = 1; index < originalBatch.operations.length; index++) { + const operation = originalBatch.operations[index]; + const currentOpSize = calculateObjectSizeInBytes(operation); + if (currentBatchSize + currentOpSize > Constants$1.DefaultMaxBulkRequestBodySizeInBytes) { + currentBatch = Object.assign(Object.assign({}, originalBatch), { operations: [], indexes: [] }); + processedBatches.push(currentBatch); + currentBatchSize = 0; + } + currentBatch.operations.push(operation); + currentBatch.indexes.push(originalBatch.indexes[index]); + currentBatchSize += currentOpSize; + } + return processedBatches; +} +/** + * Calculates size of an JSON object in bytes with utf-8 encoding. + * @hidden + */ +function calculateObjectSizeInBytes(obj) { + return new TextEncoder().encode(bodyFromData(obj)).length; +} +function decorateBatchOperation(operation, options = {}) { + if (operation.operationType === BulkOperationType.Create || + operation.operationType === BulkOperationType.Upsert) { + if ((operation.resourceBody.id === undefined || operation.resourceBody.id === "") && + !options.disableAutomaticIdGeneration) { + operation.resourceBody.id = uuid$2(); + } + } + return operation; } -exports.computeSha256Hash = computeSha256Hash; -exports.computeSha256Hmac = computeSha256Hmac; -exports.createAbortablePromise = createAbortablePromise; -exports.delay = delay; -exports.getErrorMessage = getErrorMessage; -exports.getRandomIntegerInclusive = getRandomIntegerInclusive; -exports.isDefined = isDefined; -exports.isError = isError; -exports.isNode = isNode; -exports.isObject = isObject; -exports.isObjectWithProperties = isObjectWithProperties; -exports.objectHasProperty = objectHasProperty; -exports.randomUUID = randomUUID; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 74559: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const PatchOperationType = { + add: "add", + replace: "replace", + remove: "remove", + set: "set", + incr: "incr", +}; -"use strict"; +class ErrorResponse extends Error { +} +class ResourceResponse { + constructor(resource, headers, statusCode, diagnostics, substatus) { + this.resource = resource; + this.headers = headers; + this.statusCode = statusCode; + this.diagnostics = diagnostics; + this.substatus = substatus; + } + get requestCharge() { + return Number(this.headers[Constants$1.HttpHeaders.RequestCharge]) || 0; + } + get activityId() { + return this.headers[Constants$1.HttpHeaders.ActivityId]; + } + get etag() { + return this.headers[Constants$1.HttpHeaders.ETag]; + } +} -Object.defineProperty(exports, "__esModule", ({ value: true })); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class ClientSideMetrics { + constructor(requestCharge) { + this.requestCharge = requestCharge; + } + /** + * Adds one or more ClientSideMetrics to a copy of this instance and returns the result. 
+ */ + add(...clientSideMetricsArray) { + let requestCharge = this.requestCharge; + for (const clientSideMetrics of clientSideMetricsArray) { + if (clientSideMetrics == null) { + throw new Error("clientSideMetrics has null or undefined item(s)"); + } + requestCharge += clientSideMetrics.requestCharge; + } + return new ClientSideMetrics(requestCharge); + } + static createFromArray(...clientSideMetricsArray) { + if (clientSideMetricsArray == null) { + throw new Error("clientSideMetricsArray is null or undefined item(s)"); + } + return this.zero.add(...clientSideMetricsArray); + } +} +ClientSideMetrics.zero = new ClientSideMetrics(0); -var tslib = __nccwpck_require__(26429); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var QueryMetricsConstants = { + // QueryMetrics + RetrievedDocumentCount: "retrievedDocumentCount", + RetrievedDocumentSize: "retrievedDocumentSize", + OutputDocumentCount: "outputDocumentCount", + OutputDocumentSize: "outputDocumentSize", + IndexHitRatio: "indexUtilizationRatio", + IndexHitDocumentCount: "indexHitDocumentCount", + TotalQueryExecutionTimeInMs: "totalExecutionTimeInMs", + // QueryPreparationTimes + QueryCompileTimeInMs: "queryCompileTimeInMs", + LogicalPlanBuildTimeInMs: "queryLogicalPlanBuildTimeInMs", + PhysicalPlanBuildTimeInMs: "queryPhysicalPlanBuildTimeInMs", + QueryOptimizationTimeInMs: "queryOptimizationTimeInMs", + // QueryTimes + IndexLookupTimeInMs: "indexLookupTimeInMs", + DocumentLoadTimeInMs: "documentLoadTimeInMs", + VMExecutionTimeInMs: "VMExecutionTimeInMs", + DocumentWriteTimeInMs: "writeOutputTimeInMs", + // RuntimeExecutionTimes + QueryEngineTimes: "queryEngineTimes", + SystemFunctionExecuteTimeInMs: "systemFunctionExecuteTimeInMs", + UserDefinedFunctionExecutionTimeInMs: "userFunctionExecuteTimeInMs", + // QueryMetrics Text + RetrievedDocumentCountText: "Retrieved Document Count", + RetrievedDocumentSizeText: "Retrieved Document Size", + OutputDocumentCountText: "Output Document Count", + OutputDocumentSizeText: "Output Document Size", + IndexUtilizationText: "Index Utilization", + TotalQueryExecutionTimeText: "Total Query Execution Time", + // QueryPreparationTimes Text + QueryPreparationTimesText: "Query Preparation Times", + QueryCompileTimeText: "Query Compilation Time", + LogicalPlanBuildTimeText: "Logical Plan Build Time", + PhysicalPlanBuildTimeText: "Physical Plan Build Time", + QueryOptimizationTimeText: "Query Optimization Time", + // QueryTimes Text + QueryEngineTimesText: "Query Engine Times", + IndexLookupTimeText: "Index Lookup Time", + DocumentLoadTimeText: "Document Load Time", + WriteOutputTimeText: "Document Write Time", + // RuntimeExecutionTimes Text + RuntimeExecutionTimesText: "Runtime Execution Times", + TotalExecutionTimeText: "Query Engine Execution Time", + SystemFunctionExecuteTimeText: "System Function Execution Time", + UserDefinedFunctionExecutionTimeText: "User-defined Function Execution Time", + // ClientSideQueryMetrics Text + ClientSideQueryMetricsText: "Client Side Metrics", + RetriesText: "Retry Count", + RequestChargeText: "Request Charge", + FetchExecutionRangesText: "Partition Execution Timeline", + SchedulingMetricsText: "Scheduling Metrics", +}; // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// Ported this implementation to javascript: +// https://referencesource.microsoft.com/#mscorlib/system/timespan.cs,83e476c1ae112117 +/** @hidden */ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+const ticksPerMillisecond = 10000; +/** @hidden */ +const millisecondsPerTick = 1.0 / ticksPerMillisecond; +/** @hidden */ +const ticksPerSecond = ticksPerMillisecond * 1000; // 10,000,000 +/** @hidden */ +const secondsPerTick = 1.0 / ticksPerSecond; // 0.0001 +/** @hidden */ +const ticksPerMinute = ticksPerSecond * 60; // 600,000,000 +/** @hidden */ +const minutesPerTick = 1.0 / ticksPerMinute; // 1.6666666666667e-9 +/** @hidden */ +const ticksPerHour = ticksPerMinute * 60; // 36,000,000,000 +/** @hidden */ +const hoursPerTick = 1.0 / ticksPerHour; // 2.77777777777777778e-11 +/** @hidden */ +const ticksPerDay = ticksPerHour * 24; // 864,000,000,000 +/** @hidden */ +const daysPerTick = 1.0 / ticksPerDay; // 1.1574074074074074074e-12 +/** @hidden */ +const millisPerSecond = 1000; +/** @hidden */ +const millisPerMinute = millisPerSecond * 60; // 60,000 +/** @hidden */ +const millisPerHour = millisPerMinute * 60; // 3,600,000 +/** @hidden */ +const millisPerDay = millisPerHour * 24; // 86,400,000 +/** @hidden */ +const maxMilliSeconds = Number.MAX_SAFE_INTEGER / ticksPerMillisecond; +/** @hidden */ +const minMilliSeconds = Number.MIN_SAFE_INTEGER / ticksPerMillisecond; /** - * returns an async iterator that iterates over results. It also has a `byPage` - * method that returns pages of items at once. + * Represents a time interval. * - * @param pagedResult - an object that specifies how to get pages. - * @returns a paged async iterator that iterates over results. + * @param days - Number of days. + * @param hours - Number of hours. + * @param minutes - Number of minutes. + * @param seconds - Number of seconds. + * @param milliseconds - Number of milliseconds. + * @hidden */ -function getPagedAsyncIterator(pagedResult) { - var _a; - const iter = getItemAsyncIterator(pagedResult); - return { - next() { - return iter.next(); - }, - [Symbol.asyncIterator]() { - return this; - }, - byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { - const { continuationToken, maxPageSize } = settings !== null && settings !== void 0 ? settings : {}; - return getPageAsyncIterator(pagedResult, { - pageLink: continuationToken, - maxPageSize, - }); - }), - }; -} -function getItemAsyncIterator(pagedResult) { - return tslib.__asyncGenerator(this, arguments, function* getItemAsyncIterator_1() { - var e_1, _a, e_2, _b; - const pages = getPageAsyncIterator(pagedResult); - const firstVal = yield tslib.__await(pages.next()); - // if the result does not have an array shape, i.e. 
TPage = TElement, then we return it as is - if (!Array.isArray(firstVal.value)) { - // can extract elements from this page - const { toElements } = pagedResult; - if (toElements) { - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(toElements(firstVal.value)))); - try { - for (var pages_1 = tslib.__asyncValues(pages), pages_1_1; pages_1_1 = yield tslib.__await(pages_1.next()), !pages_1_1.done;) { - const page = pages_1_1.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(toElements(page)))); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (pages_1_1 && !pages_1_1.done && (_a = pages_1.return)) yield tslib.__await(_a.call(pages_1)); - } - finally { if (e_1) throw e_1.error; } - } - } - else { - yield yield tslib.__await(firstVal.value); - // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(pages))); - } +class TimeSpan { + constructor(days, hours, minutes, seconds, milliseconds) { + // Constructor + if (!Number.isInteger(days)) { + throw new Error("days is not an integer"); } - else { - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(firstVal.value))); - try { - for (var pages_2 = tslib.__asyncValues(pages), pages_2_1; pages_2_1 = yield tslib.__await(pages_2.next()), !pages_2_1.done;) { - const page = pages_2_1.value; - // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, - // it must be the case that `TPage = TElement[]` - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(page))); - } - } - catch (e_2_1) { e_2 = { error: e_2_1 }; } - finally { - try { - if (pages_2_1 && !pages_2_1.done && (_b = pages_2.return)) yield tslib.__await(_b.call(pages_2)); - } - finally { if (e_2) throw e_2.error; } - } + if (!Number.isInteger(hours)) { + throw new Error("hours is not an integer"); } - }); -} -function getPageAsyncIterator(pagedResult, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { - const { pageLink, maxPageSize } = options; - let response = yield tslib.__await(pagedResult.getPage(pageLink !== null && pageLink !== void 0 ? pageLink : pagedResult.firstPageLink, maxPageSize)); - if (!response) { - return yield tslib.__await(void 0); + if (!Number.isInteger(minutes)) { + throw new Error("minutes is not an integer"); } - yield yield tslib.__await(response.page); - while (response.nextPageLink) { - response = yield tslib.__await(pagedResult.getPage(response.nextPageLink, maxPageSize)); - if (!response) { - return yield tslib.__await(void 0); - } - yield yield tslib.__await(response.page); + if (!Number.isInteger(seconds)) { + throw new Error("seconds is not an integer"); } - }); + if (!Number.isInteger(milliseconds)) { + throw new Error("milliseconds is not an integer"); + } + const totalMilliSeconds = (days * 3600 * 24 + hours * 3600 + minutes * 60 + seconds) * 1000 + milliseconds; + if (totalMilliSeconds > maxMilliSeconds || totalMilliSeconds < minMilliSeconds) { + throw new Error("Total number of milliseconds was either too large or too small"); + } + this._ticks = totalMilliSeconds * ticksPerMillisecond; + } + /** + * Returns a new TimeSpan object whose value is the sum of the specified TimeSpan object and this instance. + * @param ts - The time interval to add. 
+ */ + add(ts) { + if (TimeSpan.additionDoesOverflow(this._ticks, ts._ticks)) { + throw new Error("Adding the two timestamps causes an overflow."); + } + const results = this._ticks + ts._ticks; + return TimeSpan.fromTicks(results); + } + /** + * Returns a new TimeSpan object whose value is the difference of the specified TimeSpan object and this instance. + * @param ts - The time interval to subtract. + */ + subtract(ts) { + if (TimeSpan.subtractionDoesUnderflow(this._ticks, ts._ticks)) { + throw new Error("Subtracting the two timestamps causes an underflow."); + } + const results = this._ticks - ts._ticks; + return TimeSpan.fromTicks(results); + } + /** + * Compares this instance to a specified object and returns an integer that indicates whether this + * instance is shorter than, equal to, or longer than the specified object. + * @param value - The time interval to add. + */ + compareTo(value) { + if (value == null) { + return 1; + } + if (!TimeSpan.isTimeSpan(value)) { + throw new Error("Argument must be a TimeSpan object"); + } + return TimeSpan.compare(this, value); + } + /** + * Returns a new TimeSpan object whose value is the absolute value of the current TimeSpan object. + */ + duration() { + return TimeSpan.fromTicks(this._ticks >= 0 ? this._ticks : -this._ticks); + } + /** + * Returns a value indicating whether this instance is equal to a specified object. + * @param value - The time interval to check for equality. + */ + equals(value) { + if (TimeSpan.isTimeSpan(value)) { + return this._ticks === value._ticks; + } + return false; + } + /** + * Returns a new TimeSpan object whose value is the negated value of this instance. + * @param value - The time interval to check for equality. + */ + negate() { + return TimeSpan.fromTicks(-this._ticks); + } + days() { + return Math.floor(this._ticks / ticksPerDay); + } + hours() { + return Math.floor(this._ticks / ticksPerHour); + } + milliseconds() { + return Math.floor(this._ticks / ticksPerMillisecond); + } + seconds() { + return Math.floor(this._ticks / ticksPerSecond); + } + ticks() { + return this._ticks; + } + totalDays() { + return this._ticks * daysPerTick; + } + totalHours() { + return this._ticks * hoursPerTick; + } + totalMilliseconds() { + return this._ticks * millisecondsPerTick; + } + totalMinutes() { + return this._ticks * minutesPerTick; + } + totalSeconds() { + return this._ticks * secondsPerTick; + } + static fromTicks(value) { + const timeSpan = new TimeSpan(0, 0, 0, 0, 0); + timeSpan._ticks = value; + return timeSpan; + } + static isTimeSpan(timespan) { + return timespan._ticks; + } + static additionDoesOverflow(a, b) { + const c = a + b; + return a !== c - b || b !== c - a; + } + static subtractionDoesUnderflow(a, b) { + const c = a - b; + return a !== c + b || b !== a - c; + } + static compare(t1, t2) { + if (t1._ticks > t2._ticks) { + return 1; + } + if (t1._ticks < t2._ticks) { + return -1; + } + return 0; + } + static interval(value, scale) { + if (isNaN(value)) { + throw new Error("value must be a number"); + } + const milliseconds = value * scale; + if (milliseconds > maxMilliSeconds || milliseconds < minMilliSeconds) { + throw new Error("timespan too long"); + } + return TimeSpan.fromTicks(Math.floor(milliseconds * ticksPerMillisecond)); + } + static fromMilliseconds(value) { + return TimeSpan.interval(value, 1); + } + static fromSeconds(value) { + return TimeSpan.interval(value, millisPerSecond); + } + static fromMinutes(value) { + return TimeSpan.interval(value, millisPerMinute); + } + static fromHours(value) 
{ + return TimeSpan.interval(value, millisPerHour); + } + static fromDays(value) { + return TimeSpan.interval(value, millisPerDay); + } } - -exports.getPagedAsyncIterator = getPagedAsyncIterator; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 26429: -/***/ ((module) => { - -/****************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, Symbol, Reflect, Promise, SuppressedError */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __esDecorate; -var __runInitializers; -var __propKey; -var __setFunctionName; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __classPrivateFieldIn; -var __createBinding; -var __addDisposableResource; -var __disposeResources; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { - function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } - var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; - var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; - var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); - var _, done = false; - for (var i = decorators.length - 1; i >= 0; i--) { - var context = {}; - for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context.access[p] = contextIn.access[p]; - context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); - if (kind === "accessor") { - if (result === void 0) continue; - if (result === null || typeof result !== "object") throw new TypeError("Object expected"); - if (_ = accept(result.get)) descriptor.get = _; - if (_ = accept(result.set)) descriptor.set = _; - if (_ = accept(result.init)) initializers.unshift(_); - } - else if (_ = accept(result)) { - if (kind === "field") initializers.unshift(_); - else descriptor[key] = _; - } - } - if (target) Object.defineProperty(target, contextIn.name, descriptor); - done = true; - }; - - __runInitializers = function (thisArg, initializers, value) { - var useValue = arguments.length > 2; - for (var i = 0; i < initializers.length; i++) { - value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); - } - return useValue ? value : void 0; - }; - - __propKey = function (x) { - return typeof x === "symbol" ? x : "".concat(x); - }; - - __setFunctionName = function (f, name, prefix) { - if (typeof name === "symbol") name = name.description ? 
"[".concat(name.description, "]") : ""; - return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? (this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - __classPrivateFieldIn = function (state, receiver) { - if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); - return typeof state === "function" ? receiver === state : state.has(receiver); - }; - - __addDisposableResource = function (env, value, async) { - if (value !== null && value !== void 0) { - if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); - var dispose; - if (async) { - if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); - dispose = value[Symbol.asyncDispose]; - } - if (dispose === void 0) { - if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); - dispose = value[Symbol.dispose]; - } - if (typeof dispose !== "function") throw new TypeError("Object not disposable."); - env.stack.push({ value: value, dispose: dispose, async: async }); - } - else if (async) { - env.stack.push({ async: true }); - } - return value; - }; - - var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { - var e = new Error(message); - return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; - }; - - __disposeResources = function (env) { - function fail(e) { - env.error = env.hasError ? 
new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; - env.hasError = true; - } - function next() { - while (env.stack.length) { - var rec = env.stack.pop(); - try { - var result = rec.dispose && rec.dispose.call(rec.value); - if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); - } - catch (e) { - fail(e); - } - } - if (env.hasError) throw env.error; - } - return next(); - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__esDecorate", __esDecorate); - exporter("__runInitializers", __runInitializers); - exporter("__propKey", __propKey); - exporter("__setFunctionName", __setFunctionName); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); - exporter("__classPrivateFieldIn", __classPrivateFieldIn); - exporter("__addDisposableResource", __addDisposableResource); - exporter("__disposeResources", __disposeResources); -}); - - -/***/ }), - -/***/ 88121: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var logger$1 = __nccwpck_require__(3233); -var coreUtil = __nccwpck_require__(51333); -var os = __nccwpck_require__(22037); -var abortController = __nccwpck_require__(52557); -var httpsProxyAgent = __nccwpck_require__(77219); -var httpProxyAgent = __nccwpck_require__(23764); -var coreTracing = __nccwpck_require__(94175); -var util = __nccwpck_require__(73837); -var tslib = __nccwpck_require__(89045); -var stream = __nccwpck_require__(12781); -var http = __nccwpck_require__(13685); -var https = __nccwpck_require__(95687); -var zlib = __nccwpck_require__(59796); - -function _interopNamespaceDefault(e) { - var n = Object.create(null); - if (e) { - Object.keys(e).forEach(function (k) { - if (k !== 'default') { - var d = Object.getOwnPropertyDescriptor(e, k); - Object.defineProperty(n, k, d.get ? d : { - enumerable: true, - get: function () { return e[k]; } - }); - } - }); - } - n.default = e; - return Object.freeze(n); -} - -var os__namespace = /*#__PURE__*/_interopNamespaceDefault(os); -var http__namespace = /*#__PURE__*/_interopNamespaceDefault(http); -var https__namespace = /*#__PURE__*/_interopNamespaceDefault(https); -var zlib__namespace = /*#__PURE__*/_interopNamespaceDefault(zlib); +TimeSpan.zero = new TimeSpan(0, 0, 0, 0, 0); +TimeSpan.maxValue = TimeSpan.fromTicks(Number.MAX_SAFE_INTEGER); +TimeSpan.minValue = TimeSpan.fromTicks(Number.MIN_SAFE_INTEGER); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
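As a quick illustration (not part of the bundled diff), the sketch below exercises the tick-based arithmetic of the `TimeSpan` class added above. It assumes that class is in scope together with the tick/millisecond constants it references (`ticksPerMillisecond`, `millisPerSecond`, and so on), which are defined earlier in the bundle; the sample values are made up.

```js
// Illustrative usage sketch only -- not part of the bundled diff.
// Assumes the TimeSpan class above and its tick constants are in scope.
const longSpan = TimeSpan.fromSeconds(1.5);      // 1500 ms worth of ticks
const shortSpan = TimeSpan.fromMilliseconds(500); // 500 ms worth of ticks

const total = longSpan.add(shortSpan);            // overflow-checked addition
console.log(total.totalMilliseconds());                 // 2000
console.log(total.compareTo(TimeSpan.fromSeconds(2)));  // 0  (equal intervals)
console.log(shortSpan.subtract(longSpan).totalSeconds()); // -1 (intervals may be negative)
console.log(shortSpan.subtract(longSpan).duration().equals(TimeSpan.fromSeconds(1))); // true
```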
-const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]); /** - * A private implementation of Pipeline. - * Do not export this class from the package. - * @internal + * @hidden */ -class HttpPipeline { - constructor(policies) { - var _a; - this._policies = []; - this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? _a : []; - this._orderedPolicies = undefined; +function parseDelimitedString(delimitedString) { + if (delimitedString == null) { + throw new Error("delimitedString is null or undefined"); } - addPolicy(policy, options = {}) { - if (options.phase && options.afterPhase) { - throw new Error("Policies inside a phase cannot specify afterPhase."); - } - if (options.phase && !ValidPhaseNames.has(options.phase)) { - throw new Error(`Invalid phase name: ${options.phase}`); - } - if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { - throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + const metrics = {}; + const headerAttributes = delimitedString.split(";"); + for (const attribute of headerAttributes) { + const attributeKeyValue = attribute.split("="); + if (attributeKeyValue.length !== 2) { + throw new Error("recieved a malformed delimited string"); } - this._policies.push({ - policy, - options, - }); - this._orderedPolicies = undefined; + const attributeKey = attributeKeyValue[0]; + const attributeValue = parseFloat(attributeKeyValue[1]); + metrics[attributeKey] = attributeValue; } - removePolicy(options) { - const removedPolicies = []; - this._policies = this._policies.filter((policyDescriptor) => { - if ((options.name && policyDescriptor.policy.name === options.name) || - (options.phase && policyDescriptor.options.phase === options.phase)) { - removedPolicies.push(policyDescriptor.policy); - return false; - } - else { - return true; + return metrics; +} +/** + * @hidden + */ +function timeSpanFromMetrics(metrics /* TODO: any */, key) { + if (key in metrics) { + return TimeSpan.fromMilliseconds(metrics[key]); + } + return TimeSpan.zero; +} + +// Copyright (c) Microsoft Corporation. +class QueryPreparationTimes { + constructor(queryCompilationTime, logicalPlanBuildTime, physicalPlanBuildTime, queryOptimizationTime) { + this.queryCompilationTime = queryCompilationTime; + this.logicalPlanBuildTime = logicalPlanBuildTime; + this.physicalPlanBuildTime = physicalPlanBuildTime; + this.queryOptimizationTime = queryOptimizationTime; + } + /** + * returns a new QueryPreparationTimes instance that is the addition of this and the arguments. 
+ */ + add(...queryPreparationTimesArray) { + let queryCompilationTime = this.queryCompilationTime; + let logicalPlanBuildTime = this.logicalPlanBuildTime; + let physicalPlanBuildTime = this.physicalPlanBuildTime; + let queryOptimizationTime = this.queryOptimizationTime; + for (const queryPreparationTimes of queryPreparationTimesArray) { + if (queryPreparationTimes == null) { + throw new Error("queryPreparationTimesArray has null or undefined item(s)"); } - }); - this._orderedPolicies = undefined; - return removedPolicies; + queryCompilationTime = queryCompilationTime.add(queryPreparationTimes.queryCompilationTime); + logicalPlanBuildTime = logicalPlanBuildTime.add(queryPreparationTimes.logicalPlanBuildTime); + physicalPlanBuildTime = physicalPlanBuildTime.add(queryPreparationTimes.physicalPlanBuildTime); + queryOptimizationTime = queryOptimizationTime.add(queryPreparationTimes.queryOptimizationTime); + } + return new QueryPreparationTimes(queryCompilationTime, logicalPlanBuildTime, physicalPlanBuildTime, queryOptimizationTime); } - sendRequest(httpClient, request) { - const policies = this.getOrderedPolicies(); - const pipeline = policies.reduceRight((next, policy) => { - return (req) => { - return policy.sendRequest(req, next); - }; - }, (req) => httpClient.sendRequest(req)); - return pipeline(request); + /** + * Output the QueryPreparationTimes as a delimited string. + */ + toDelimitedString() { + return (`${QueryMetricsConstants.QueryCompileTimeInMs}=${this.queryCompilationTime.totalMilliseconds()};` + + `${QueryMetricsConstants.LogicalPlanBuildTimeInMs}=${this.logicalPlanBuildTime.totalMilliseconds()};` + + `${QueryMetricsConstants.PhysicalPlanBuildTimeInMs}=${this.physicalPlanBuildTime.totalMilliseconds()};` + + `${QueryMetricsConstants.QueryOptimizationTimeInMs}=${this.queryOptimizationTime.totalMilliseconds()}`); } - getOrderedPolicies() { - if (!this._orderedPolicies) { - this._orderedPolicies = this.orderPolicies(); + /** + * Returns a new instance of the QueryPreparationTimes class that is the + * aggregation of an array of QueryPreparationTimes. + */ + static createFromArray(queryPreparationTimesArray) { + if (queryPreparationTimesArray == null) { + throw new Error("queryPreparationTimesArray is null or undefined item(s)"); } - return this._orderedPolicies; + return QueryPreparationTimes.zero.add(...queryPreparationTimesArray); } - clone() { - return new HttpPipeline(this._policies); + /** + * Returns a new instance of the QueryPreparationTimes class this is deserialized from a delimited string. + */ + static createFromDelimitedString(delimitedString) { + const metrics = parseDelimitedString(delimitedString); + return new QueryPreparationTimes(timeSpanFromMetrics(metrics, QueryMetricsConstants.QueryCompileTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.LogicalPlanBuildTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.PhysicalPlanBuildTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.QueryOptimizationTimeInMs)); } - static create() { - return new HttpPipeline(); +} +QueryPreparationTimes.zero = new QueryPreparationTimes(TimeSpan.zero, TimeSpan.zero, TimeSpan.zero, TimeSpan.zero); + +// Copyright (c) Microsoft Corporation. 
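To make the delimited-string round trip and aggregation of `QueryPreparationTimes` concrete, here is a small sketch (again, not part of the bundled diff). It assumes the class above, `TimeSpan`, and the `QueryMetricsConstants` key map referenced by `toDelimitedString` are in scope; the millisecond values are arbitrary.

```js
// Illustrative sketch only -- not part of the bundled diff.
// Assumes QueryPreparationTimes, TimeSpan and QueryMetricsConstants are in scope.
const sample = new QueryPreparationTimes(
  TimeSpan.fromMilliseconds(1.5),   // queryCompilationTime
  TimeSpan.fromMilliseconds(0.5),   // logicalPlanBuildTime
  TimeSpan.fromMilliseconds(0.25),  // physicalPlanBuildTime
  TimeSpan.fromMilliseconds(0.125)  // queryOptimizationTime
);

// Serialize to the "key=value;key=value" wire format and parse it back.
const roundTripped = QueryPreparationTimes.createFromDelimitedString(sample.toDelimitedString());

// Aggregate the two samples (createFromArray sums each component).
const aggregated = QueryPreparationTimes.createFromArray([sample, roundTripped]);
console.log(aggregated.queryCompilationTime.totalMilliseconds()); // 3
```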
+class RuntimeExecutionTimes { + constructor(queryEngineExecutionTime, systemFunctionExecutionTime, userDefinedFunctionExecutionTime) { + this.queryEngineExecutionTime = queryEngineExecutionTime; + this.systemFunctionExecutionTime = systemFunctionExecutionTime; + this.userDefinedFunctionExecutionTime = userDefinedFunctionExecutionTime; } - orderPolicies() { - /** - * The goal of this method is to reliably order pipeline policies - * based on their declared requirements when they were added. - * - * Order is first determined by phase: - * - * 1. Serialize Phase - * 2. Policies not in a phase - * 3. Deserialize Phase - * 4. Retry Phase - * 5. Sign Phase - * - * Within each phase, policies are executed in the order - * they were added unless they were specified to execute - * before/after other policies or after a particular phase. - * - * To determine the final order, we will walk the policy list - * in phase order multiple times until all dependencies are - * satisfied. - * - * `afterPolicies` are the set of policies that must be - * executed before a given policy. This requirement is - * considered satisfied when each of the listed policies - * have been scheduled. - * - * `beforePolicies` are the set of policies that must be - * executed after a given policy. Since this dependency - * can be expressed by converting it into a equivalent - * `afterPolicies` declarations, they are normalized - * into that form for simplicity. - * - * An `afterPhase` dependency is considered satisfied when all - * policies in that phase have scheduled. - * - */ - const result = []; - // Track all policies we know about. - const policyMap = new Map(); - function createPhase(name) { - return { - name, - policies: new Set(), - hasRun: false, - hasAfterPolicies: false, - }; - } - // Track policies for each phase. - const serializePhase = createPhase("Serialize"); - const noPhase = createPhase("None"); - const deserializePhase = createPhase("Deserialize"); - const retryPhase = createPhase("Retry"); - const signPhase = createPhase("Sign"); - // a list of phases in order - const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; - // Small helper function to map phase name to each Phase - function getPhase(phase) { - if (phase === "Retry") { - return retryPhase; - } - else if (phase === "Serialize") { - return serializePhase; - } - else if (phase === "Deserialize") { - return deserializePhase; - } - else if (phase === "Sign") { - return signPhase; - } - else { - return noPhase; + /** + * returns a new RuntimeExecutionTimes instance that is the addition of this and the arguments. + */ + add(...runtimeExecutionTimesArray) { + let queryEngineExecutionTime = this.queryEngineExecutionTime; + let systemFunctionExecutionTime = this.systemFunctionExecutionTime; + let userDefinedFunctionExecutionTime = this.userDefinedFunctionExecutionTime; + for (const runtimeExecutionTimes of runtimeExecutionTimesArray) { + if (runtimeExecutionTimes == null) { + throw new Error("runtimeExecutionTimes has null or undefined item(s)"); } + queryEngineExecutionTime = queryEngineExecutionTime.add(runtimeExecutionTimes.queryEngineExecutionTime); + systemFunctionExecutionTime = systemFunctionExecutionTime.add(runtimeExecutionTimes.systemFunctionExecutionTime); + userDefinedFunctionExecutionTime = userDefinedFunctionExecutionTime.add(runtimeExecutionTimes.userDefinedFunctionExecutionTime); } - // First walk each policy and create a node to track metadata. 
- for (const descriptor of this._policies) { - const policy = descriptor.policy; - const options = descriptor.options; - const policyName = policy.name; - if (policyMap.has(policyName)) { - throw new Error("Duplicate policy names not allowed in pipeline"); - } - const node = { - policy, - dependsOn: new Set(), - dependants: new Set(), - }; - if (options.afterPhase) { - node.afterPhase = getPhase(options.afterPhase); - node.afterPhase.hasAfterPolicies = true; - } - policyMap.set(policyName, node); - const phase = getPhase(options.phase); - phase.policies.add(node); + return new RuntimeExecutionTimes(queryEngineExecutionTime, systemFunctionExecutionTime, userDefinedFunctionExecutionTime); + } + /** + * Output the RuntimeExecutionTimes as a delimited string. + */ + toDelimitedString() { + return (`${QueryMetricsConstants.SystemFunctionExecuteTimeInMs}=${this.systemFunctionExecutionTime.totalMilliseconds()};` + + `${QueryMetricsConstants.UserDefinedFunctionExecutionTimeInMs}=${this.userDefinedFunctionExecutionTime.totalMilliseconds()}`); + } + /** + * Returns a new instance of the RuntimeExecutionTimes class that is + * the aggregation of an array of RuntimeExecutionTimes. + */ + static createFromArray(runtimeExecutionTimesArray) { + if (runtimeExecutionTimesArray == null) { + throw new Error("runtimeExecutionTimesArray is null or undefined item(s)"); } - // Now that each policy has a node, connect dependency references. - for (const descriptor of this._policies) { - const { policy, options } = descriptor; - const policyName = policy.name; - const node = policyMap.get(policyName); - if (!node) { - throw new Error(`Missing node for policy ${policyName}`); - } - if (options.afterPolicies) { - for (const afterPolicyName of options.afterPolicies) { - const afterNode = policyMap.get(afterPolicyName); - if (afterNode) { - // Linking in both directions helps later - // when we want to notify dependants. - node.dependsOn.add(afterNode); - afterNode.dependants.add(node); - } - } - } - if (options.beforePolicies) { - for (const beforePolicyName of options.beforePolicies) { - const beforeNode = policyMap.get(beforePolicyName); - if (beforeNode) { - // To execute before another node, make it - // depend on the current node. - beforeNode.dependsOn.add(node); - node.dependants.add(beforeNode); - } - } + return RuntimeExecutionTimes.zero.add(...runtimeExecutionTimesArray); + } + /** + * Returns a new instance of the RuntimeExecutionTimes class this is deserialized from a delimited string. 
+ */ + static createFromDelimitedString(delimitedString) { + const metrics = parseDelimitedString(delimitedString); + const vmExecutionTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.VMExecutionTimeInMs); + const indexLookupTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.IndexLookupTimeInMs); + const documentLoadTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentLoadTimeInMs); + const documentWriteTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentWriteTimeInMs); + let queryEngineExecutionTime = TimeSpan.zero; + queryEngineExecutionTime = queryEngineExecutionTime.add(vmExecutionTime); + queryEngineExecutionTime = queryEngineExecutionTime.subtract(indexLookupTime); + queryEngineExecutionTime = queryEngineExecutionTime.subtract(documentLoadTime); + queryEngineExecutionTime = queryEngineExecutionTime.subtract(documentWriteTime); + return new RuntimeExecutionTimes(queryEngineExecutionTime, timeSpanFromMetrics(metrics, QueryMetricsConstants.SystemFunctionExecuteTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.UserDefinedFunctionExecutionTimeInMs)); + } +} +RuntimeExecutionTimes.zero = new RuntimeExecutionTimes(TimeSpan.zero, TimeSpan.zero, TimeSpan.zero); + +// Copyright (c) Microsoft Corporation. +class QueryMetrics { + constructor(retrievedDocumentCount, retrievedDocumentSize, outputDocumentCount, outputDocumentSize, indexHitDocumentCount, totalQueryExecutionTime, queryPreparationTimes, indexLookupTime, documentLoadTime, vmExecutionTime, runtimeExecutionTimes, documentWriteTime, clientSideMetrics) { + this.retrievedDocumentCount = retrievedDocumentCount; + this.retrievedDocumentSize = retrievedDocumentSize; + this.outputDocumentCount = outputDocumentCount; + this.outputDocumentSize = outputDocumentSize; + this.indexHitDocumentCount = indexHitDocumentCount; + this.totalQueryExecutionTime = totalQueryExecutionTime; + this.queryPreparationTimes = queryPreparationTimes; + this.indexLookupTime = indexLookupTime; + this.documentLoadTime = documentLoadTime; + this.vmExecutionTime = vmExecutionTime; + this.runtimeExecutionTimes = runtimeExecutionTimes; + this.documentWriteTime = documentWriteTime; + this.clientSideMetrics = clientSideMetrics; + } + /** + * Gets the IndexHitRatio + * @hidden + */ + get indexHitRatio() { + return this.retrievedDocumentCount === 0 + ? 1 + : this.indexHitDocumentCount / this.retrievedDocumentCount; + } + /** + * returns a new QueryMetrics instance that is the addition of this and the arguments. 
+ */ + add(queryMetricsArray) { + let retrievedDocumentCount = 0; + let retrievedDocumentSize = 0; + let outputDocumentCount = 0; + let outputDocumentSize = 0; + let indexHitDocumentCount = 0; + let totalQueryExecutionTime = TimeSpan.zero; + const queryPreparationTimesArray = []; + let indexLookupTime = TimeSpan.zero; + let documentLoadTime = TimeSpan.zero; + let vmExecutionTime = TimeSpan.zero; + const runtimeExecutionTimesArray = []; + let documentWriteTime = TimeSpan.zero; + const clientSideQueryMetricsArray = []; + queryMetricsArray.push(this); + for (const queryMetrics of queryMetricsArray) { + if (queryMetrics) { + retrievedDocumentCount += queryMetrics.retrievedDocumentCount; + retrievedDocumentSize += queryMetrics.retrievedDocumentSize; + outputDocumentCount += queryMetrics.outputDocumentCount; + outputDocumentSize += queryMetrics.outputDocumentSize; + indexHitDocumentCount += queryMetrics.indexHitDocumentCount; + totalQueryExecutionTime = totalQueryExecutionTime.add(queryMetrics.totalQueryExecutionTime); + queryPreparationTimesArray.push(queryMetrics.queryPreparationTimes); + indexLookupTime = indexLookupTime.add(queryMetrics.indexLookupTime); + documentLoadTime = documentLoadTime.add(queryMetrics.documentLoadTime); + vmExecutionTime = vmExecutionTime.add(queryMetrics.vmExecutionTime); + runtimeExecutionTimesArray.push(queryMetrics.runtimeExecutionTimes); + documentWriteTime = documentWriteTime.add(queryMetrics.documentWriteTime); + clientSideQueryMetricsArray.push(queryMetrics.clientSideMetrics); } } - function walkPhase(phase) { - phase.hasRun = true; - // Sets iterate in insertion order - for (const node of phase.policies) { - if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { - // If this node is waiting on a phase to complete, - // we need to skip it for now. - // Even if the phase is empty, we should wait for it - // to be walked to avoid re-ordering policies. - continue; - } - if (node.dependsOn.size === 0) { - // If there's nothing else we're waiting for, we can - // add this policy to the result list. - result.push(node.policy); - // Notify anything that depends on this policy that - // the policy has been scheduled. - for (const dependant of node.dependants) { - dependant.dependsOn.delete(node); - } - policyMap.delete(node.policy.name); - phase.policies.delete(node); - } - } + return new QueryMetrics(retrievedDocumentCount, retrievedDocumentSize, outputDocumentCount, outputDocumentSize, indexHitDocumentCount, totalQueryExecutionTime, QueryPreparationTimes.createFromArray(queryPreparationTimesArray), indexLookupTime, documentLoadTime, vmExecutionTime, RuntimeExecutionTimes.createFromArray(runtimeExecutionTimesArray), documentWriteTime, ClientSideMetrics.createFromArray(...clientSideQueryMetricsArray)); + } + /** + * Output the QueryMetrics as a delimited string. 
+ * @hidden + */ + toDelimitedString() { + return (QueryMetricsConstants.RetrievedDocumentCount + + "=" + + this.retrievedDocumentCount + + ";" + + QueryMetricsConstants.RetrievedDocumentSize + + "=" + + this.retrievedDocumentSize + + ";" + + QueryMetricsConstants.OutputDocumentCount + + "=" + + this.outputDocumentCount + + ";" + + QueryMetricsConstants.OutputDocumentSize + + "=" + + this.outputDocumentSize + + ";" + + QueryMetricsConstants.IndexHitRatio + + "=" + + this.indexHitRatio + + ";" + + QueryMetricsConstants.TotalQueryExecutionTimeInMs + + "=" + + this.totalQueryExecutionTime.totalMilliseconds() + + ";" + + this.queryPreparationTimes.toDelimitedString() + + ";" + + QueryMetricsConstants.IndexLookupTimeInMs + + "=" + + this.indexLookupTime.totalMilliseconds() + + ";" + + QueryMetricsConstants.DocumentLoadTimeInMs + + "=" + + this.documentLoadTime.totalMilliseconds() + + ";" + + QueryMetricsConstants.VMExecutionTimeInMs + + "=" + + this.vmExecutionTime.totalMilliseconds() + + ";" + + this.runtimeExecutionTimes.toDelimitedString() + + ";" + + QueryMetricsConstants.DocumentWriteTimeInMs + + "=" + + this.documentWriteTime.totalMilliseconds()); + } + /** + * Returns a new instance of the QueryMetrics class that is the aggregation of an array of query metrics. + */ + static createFromArray(queryMetricsArray) { + if (!queryMetricsArray) { + throw new Error("queryMetricsArray is null or undefined item(s)"); } - function walkPhases() { - for (const phase of orderedPhases) { - walkPhase(phase); - // if the phase isn't complete - if (phase.policies.size > 0 && phase !== noPhase) { - if (!noPhase.hasRun) { - // Try running noPhase to see if that unblocks this phase next tick. - // This can happen if a phase that happens before noPhase - // is waiting on a noPhase policy to complete. - walkPhase(noPhase); - } - // Don't proceed to the next phase until this phase finishes. - return; - } - if (phase.hasAfterPolicies) { - // Run any policies unblocked by this phase - walkPhase(noPhase); - } - } + return QueryMetrics.zero.add(queryMetricsArray); + } + /** + * Returns a new instance of the QueryMetrics class this is deserialized from a delimited string. 
+ */ + static createFromDelimitedString(delimitedString, clientSideMetrics) { + const metrics = parseDelimitedString(delimitedString); + const indexHitRatio = metrics[QueryMetricsConstants.IndexHitRatio] || 0; + const retrievedDocumentCount = metrics[QueryMetricsConstants.RetrievedDocumentCount] || 0; + const indexHitCount = indexHitRatio * retrievedDocumentCount; + const outputDocumentCount = metrics[QueryMetricsConstants.OutputDocumentCount] || 0; + const outputDocumentSize = metrics[QueryMetricsConstants.OutputDocumentSize] || 0; + const retrievedDocumentSize = metrics[QueryMetricsConstants.RetrievedDocumentSize] || 0; + const totalQueryExecutionTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.TotalQueryExecutionTimeInMs); + return new QueryMetrics(retrievedDocumentCount, retrievedDocumentSize, outputDocumentCount, outputDocumentSize, indexHitCount, totalQueryExecutionTime, QueryPreparationTimes.createFromDelimitedString(delimitedString), timeSpanFromMetrics(metrics, QueryMetricsConstants.IndexLookupTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentLoadTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.VMExecutionTimeInMs), RuntimeExecutionTimes.createFromDelimitedString(delimitedString), timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentWriteTimeInMs), clientSideMetrics || ClientSideMetrics.zero); + } +} +QueryMetrics.zero = new QueryMetrics(0, 0, 0, 0, 0, TimeSpan.zero, QueryPreparationTimes.zero, TimeSpan.zero, TimeSpan.zero, TimeSpan.zero, RuntimeExecutionTimes.zero, TimeSpan.zero, ClientSideMetrics.zero); + +// Copyright (c) Microsoft Corporation. +/** @hidden */ +// TODO: docs +function getRequestChargeIfAny(headers) { + if (typeof headers === "number") { + return headers; + } + else if (typeof headers === "string") { + return parseFloat(headers); + } + if (headers) { + const rc = headers[Constants$1.HttpHeaders.RequestCharge]; + if (rc) { + return parseFloat(rc); } - // Iterate until we've put every node in the result list. - let iteration = 0; - while (policyMap.size > 0) { - iteration++; - const initialResultLength = result.length; - // Keep walking each phase in order until we can order every node. - walkPhases(); - // The result list *should* get at least one larger each time - // after the first full pass. - // Otherwise, we're going to loop forever. - if (result.length <= initialResultLength && iteration > 1) { - throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); - } + else { + return 0; } - return result; + } + else { + return 0; } } /** - * Creates a totally empty pipeline. - * Useful for testing or creating a custom one. 
+ * @hidden */ -function createEmptyPipeline() { - return HttpPipeline.create(); +function getInitialHeader() { + const headers = {}; + headers[Constants$1.HttpHeaders.RequestCharge] = 0; + headers[Constants$1.HttpHeaders.QueryMetrics] = {}; + return headers; +} +/** + * @hidden + */ +// TODO: The name of this method isn't very accurate to what it does +function mergeHeaders(headers, toBeMergedHeaders) { + if (headers[Constants$1.HttpHeaders.RequestCharge] === undefined) { + headers[Constants$1.HttpHeaders.RequestCharge] = 0; + } + if (headers[Constants$1.HttpHeaders.QueryMetrics] === undefined) { + headers[Constants$1.HttpHeaders.QueryMetrics] = QueryMetrics.zero; + } + if (!toBeMergedHeaders) { + return; + } + headers[Constants$1.HttpHeaders.RequestCharge] += getRequestChargeIfAny(toBeMergedHeaders); + if (toBeMergedHeaders[Constants$1.HttpHeaders.IsRUPerMinuteUsed]) { + headers[Constants$1.HttpHeaders.IsRUPerMinuteUsed] = + toBeMergedHeaders[Constants$1.HttpHeaders.IsRUPerMinuteUsed]; + } + if (Constants$1.HttpHeaders.QueryMetrics in toBeMergedHeaders) { + const headerQueryMetrics = headers[Constants$1.HttpHeaders.QueryMetrics]; + const toBeMergedHeaderQueryMetrics = toBeMergedHeaders[Constants$1.HttpHeaders.QueryMetrics]; + for (const partitionId in toBeMergedHeaderQueryMetrics) { + if (headerQueryMetrics[partitionId]) { + const combinedQueryMetrics = headerQueryMetrics[partitionId].add([ + toBeMergedHeaderQueryMetrics[partitionId], + ]); + headerQueryMetrics[partitionId] = combinedQueryMetrics; + } + else { + headerQueryMetrics[partitionId] = toBeMergedHeaderQueryMetrics[partitionId]; + } + } + } + if (Constants$1.HttpHeaders.IndexUtilization in toBeMergedHeaders) { + headers[Constants$1.HttpHeaders.IndexUtilization] = + toBeMergedHeaders[Constants$1.HttpHeaders.IndexUtilization]; + } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const logger = logger$1.createClientLogger("core-rest-pipeline"); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
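The header-merging helpers added above accumulate request charge across partial responses. The sketch below (not part of the bundled diff) shows that behaviour; it assumes `getInitialHeader`, `mergeHeaders`, `getRequestChargeIfAny`, and the `Constants$1.HttpHeaders` map they reference are in scope, and the page objects are fabricated stand-ins for response headers.

```js
// Illustrative sketch only -- not part of the bundled diff.
// Assumes the helpers above and Constants$1.HttpHeaders are in scope.
const aggregate = getInitialHeader(); // starts with a zero request charge
const pageOne = { [Constants$1.HttpHeaders.RequestCharge]: "2.5" }; // header values may arrive as strings
const pageTwo = { [Constants$1.HttpHeaders.RequestCharge]: 3 };

mergeHeaders(aggregate, pageOne);
mergeHeaders(aggregate, pageTwo);

console.log(aggregate[Constants$1.HttpHeaders.RequestCharge]); // 5.5
console.log(getRequestChargeIfAny(pageTwo));                   // 3
```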
-const RedactedString = "REDACTED"; -// Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts -const defaultAllowedHeaderNames = [ - "x-ms-client-request-id", - "x-ms-return-client-request-id", - "x-ms-useragent", - "x-ms-correlation-request-id", - "x-ms-request-id", - "client-request-id", - "ms-cv", - "return-client-request-id", - "traceparent", - "Access-Control-Allow-Credentials", - "Access-Control-Allow-Headers", - "Access-Control-Allow-Methods", - "Access-Control-Allow-Origin", - "Access-Control-Expose-Headers", - "Access-Control-Max-Age", - "Access-Control-Request-Headers", - "Access-Control-Request-Method", - "Origin", - "Accept", - "Accept-Encoding", - "Cache-Control", - "Connection", - "Content-Length", - "Content-Type", - "Date", - "ETag", - "Expires", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "Last-Modified", - "Pragma", - "Request-Id", - "Retry-After", - "Server", - "Transfer-Encoding", - "User-Agent", - "WWW-Authenticate", -]; -const defaultAllowedQueryParameters = ["api-version"]; -/** - * @internal - */ -class Sanitizer { - constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) { - allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); - allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); - this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); - this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); - } - sanitize(obj) { - const seen = new Set(); - return JSON.stringify(obj, (key, value) => { - // Ensure Errors include their interesting non-enumerable members - if (value instanceof Error) { - return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); - } - if (key === "headers") { - return this.sanitizeHeaders(value); - } - else if (key === "url") { - return this.sanitizeUrl(value); - } - else if (key === "query") { - return this.sanitizeQuery(value); - } - else if (key === "body") { - // Don't log the request body - return undefined; - } - else if (key === "response") { - // Don't log response again - return undefined; - } - else if (key === "operationSpec") { - // When using sendOperationRequest, the request carries a massive - // field with the autorest spec. No need to log it. 
- return undefined; - } - else if (Array.isArray(value) || coreUtil.isObject(value)) { - if (seen.has(value)) { - return "[Circular]"; - } - seen.add(value); - } - return value; - }, 2); +class IndexUtilizationInfo { + constructor(UtilizedSingleIndexes, PotentialSingleIndexes, UtilizedCompositeIndexes, PotentialCompositeIndexes) { + this.UtilizedSingleIndexes = UtilizedSingleIndexes; + this.PotentialSingleIndexes = PotentialSingleIndexes; + this.UtilizedCompositeIndexes = UtilizedCompositeIndexes; + this.PotentialCompositeIndexes = PotentialCompositeIndexes; } - sanitizeHeaders(obj) { - const sanitized = {}; - for (const key of Object.keys(obj)) { - if (this.allowedHeaderNames.has(key.toLowerCase())) { - sanitized[key] = obj[key]; - } - else { - sanitized[key] = RedactedString; - } + static tryCreateFromDelimitedBase64String(delimitedString, out) { + if (delimitedString == null) { + out.result = IndexUtilizationInfo.Empty; + return false; } - return sanitized; + return IndexUtilizationInfo.tryCreateFromDelimitedString(Buffer.from(delimitedString, "base64").toString(), out); } - sanitizeQuery(value) { - if (typeof value !== "object" || value === null) { - return value; + static tryCreateFromDelimitedString(delimitedString, out) { + if (delimitedString == null) { + out.result = IndexUtilizationInfo.Empty; + return false; } - const sanitized = {}; - for (const k of Object.keys(value)) { - if (this.allowedQueryParameters.has(k.toLowerCase())) { - sanitized[k] = value[k]; - } - else { - sanitized[k] = RedactedString; - } + try { + out.result = JSON.parse(delimitedString) || IndexUtilizationInfo.Empty; + return true; } - return sanitized; - } - sanitizeUrl(value) { - if (typeof value !== "string" || value === null) { - return value; + catch (error) { + out.result = IndexUtilizationInfo.Empty; + return false; } - const url = new URL(value); - if (!url.search) { - return value; + } + static createFromString(delimitedString, isBase64Encoded) { + var _a; + const result = { result: undefined }; + if (isBase64Encoded) { + IndexUtilizationInfo.tryCreateFromDelimitedBase64String(delimitedString, result); } - for (const [key] of url.searchParams) { - if (!this.allowedQueryParameters.has(key.toLowerCase())) { - url.searchParams.set(key, RedactedString); - } + else { + IndexUtilizationInfo.tryCreateFromDelimitedString(delimitedString, result); } - return url.toString(); + return (_a = result.result) !== null && _a !== void 0 ? _a : IndexUtilizationInfo.Empty; } } +IndexUtilizationInfo.Empty = new IndexUtilizationInfo([], [], [], []); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -/** - * The programmatic identifier of the logPolicy. - */ -const logPolicyName = "logPolicy"; -/** - * A policy that logs all requests and responses. - * @param options - Options to configure logPolicy. - */ -function logPolicy(options = {}) { - var _a; - const logger$1 = (_a = options.logger) !== null && _a !== void 0 ? 
_a : logger.info; - const sanitizer = new Sanitizer({ - additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, - additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, - }); - return { - name: logPolicyName, - async sendRequest(request, next) { - if (!logger$1.enabled) { - return next(request); - } - logger$1(`Request: ${sanitizer.sanitize(request)}`); - const response = await next(request); - logger$1(`Response status code: ${response.status}`); - logger$1(`Headers: ${sanitizer.sanitize(response.headers)}`); - return response; - }, - }; -} +var Constants = { + IndexUtilizationInfo: "Index Utilization Information", + UtilizedSingleIndexes: "Utilized Single Indexes", + PotentialSingleIndexes: "Potential Single Indexes", + UtilizedCompositeIndexes: "Utilized Composite Indexes", + PotentialCompositeIndexes: "Potential Composite Indexes", + IndexExpression: "Index Spec", + IndexImpactScore: "Index Impact Score", + IndexUtilizationSeparator: "---", +}; // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The programmatic identifier of the redirectPolicy. - */ -const redirectPolicyName = "redirectPolicy"; -/** - * Methods that are allowed to follow redirects 301 and 302 - */ -const allowedRedirect = ["GET", "HEAD"]; -/** - * A policy to follow Location headers from the server in order - * to support server-side redirection. - * In the browser, this policy is not used. - * @param options - Options to control policy behavior. - */ -function redirectPolicy(options = {}) { - const { maxRetries = 20 } = options; - return { - name: redirectPolicyName, - async sendRequest(request, next) { - const response = await next(request); - return handleRedirect(next, response, maxRetries); - }, - }; -} -async function handleRedirect(next, response, maxRetries, currentRetries = 0) { - const { request, status, headers } = response; - const locationHeader = headers.get("location"); - if (locationHeader && - (status === 300 || - (status === 301 && allowedRedirect.includes(request.method)) || - (status === 302 && allowedRedirect.includes(request.method)) || - (status === 303 && request.method === "POST") || - status === 307) && - currentRetries < maxRetries) { - const url = new URL(locationHeader, request.url); - request.url = url.toString(); - // POST request with Status code 303 should be converted into a - // redirected GET request if the redirect url is present in the location header - if (status === 303) { - request.method = "GET"; - request.headers.delete("Content-Length"); - delete request.body; +class IndexMetricWriter { + writeIndexMetrics(indexUtilizationInfo) { + let result = ""; + result = this.writeBeforeIndexUtilizationInfo(result); + result = this.writeIndexUtilizationInfo(result, indexUtilizationInfo); + result = this.writeAfterIndexUtilizationInfo(result); + return result; + } + writeBeforeIndexUtilizationInfo(result) { + result = this.appendNewlineToResult(result); + result = this.appendHeaderToResult(result, Constants.IndexUtilizationInfo, 0); + return result; + } + writeIndexUtilizationInfo(result, indexUtilizationInfo) { + result = this.appendHeaderToResult(result, Constants.UtilizedSingleIndexes, 1); + for (const indexUtilizationEntity of indexUtilizationInfo.UtilizedSingleIndexes) { + result = this.writeSingleIndexUtilizationEntity(result, indexUtilizationEntity); } - request.headers.delete("Authorization"); - const res = await next(request); - return handleRedirect(next, res, maxRetries, currentRetries + 1); + result = 
this.appendHeaderToResult(result, Constants.PotentialSingleIndexes, 1); + for (const indexUtilizationEntity of indexUtilizationInfo.PotentialSingleIndexes) { + result = this.writeSingleIndexUtilizationEntity(result, indexUtilizationEntity); + } + result = this.appendHeaderToResult(result, Constants.UtilizedCompositeIndexes, 1); + for (const indexUtilizationEntity of indexUtilizationInfo.UtilizedCompositeIndexes) { + result = this.writeCompositeIndexUtilizationEntity(result, indexUtilizationEntity); + } + result = this.appendHeaderToResult(result, Constants.PotentialCompositeIndexes, 1); + for (const indexUtilizationEntity of indexUtilizationInfo.PotentialCompositeIndexes) { + result = this.writeCompositeIndexUtilizationEntity(result, indexUtilizationEntity); + } + return result; + } + writeAfterIndexUtilizationInfo(result) { + return result; + } + writeSingleIndexUtilizationEntity(result, indexUtilizationEntity) { + result = this.appendHeaderToResult(result, `${Constants.IndexExpression}: ${indexUtilizationEntity.IndexSpec}`, 2); + result = this.appendHeaderToResult(result, `${Constants.IndexImpactScore}: ${indexUtilizationEntity.IndexImpactScore}`, 2); + result = this.appendHeaderToResult(result, Constants.IndexUtilizationSeparator, 2); + return result; + } + writeCompositeIndexUtilizationEntity(result, indexUtilizationEntity) { + result = this.appendHeaderToResult(result, `${Constants.IndexExpression}: ${indexUtilizationEntity.IndexSpecs.join(", ")}`, 2); + result = this.appendHeaderToResult(result, `${Constants.IndexImpactScore}: ${indexUtilizationEntity.IndexImpactScore}`, 2); + result = this.appendHeaderToResult(result, Constants.IndexUtilizationSeparator, 2); + return result; + } + appendNewlineToResult(result) { + return this.appendHeaderToResult(result, "", 0); + } + appendHeaderToResult(result, headerTitle, indentLevel) { + const Indent = " "; + const header = `${Indent.repeat(indentLevel)}${headerTitle}\n`; + result += header; + return result; } - return response; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * @internal - */ -function getHeaderName() { - return "User-Agent"; -} -/** - * @internal - */ -function setPlatformSpecificData(map) { - map.set("Node", process.version); - map.set("OS", `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`); } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const SDK_VERSION = "1.12.3"; -const DEFAULT_RETRY_POLICY_COUNT = 3; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function getUserAgentString(telemetryInfo) { - const parts = []; - for (const [key, value] of telemetryInfo) { - const token = value ? 
`${key}/${value}` : key; - parts.push(token); +class FeedResponse { + constructor(resources, headers, hasMoreResults, diagnostics) { + this.resources = resources; + this.headers = headers; + this.hasMoreResults = hasMoreResults; + this.diagnostics = diagnostics; + } + get continuation() { + return this.continuationToken; + } + get continuationToken() { + return this.headers[Constants$1.HttpHeaders.Continuation]; + } + get queryMetrics() { + return this.headers[Constants$1.HttpHeaders.QueryMetrics]; + } + get requestCharge() { + return getRequestChargeIfAny(this.headers); + } + get activityId() { + return this.headers[Constants$1.HttpHeaders.ActivityId]; + } + get indexMetrics() { + const writer = new IndexMetricWriter(); + const indexUtilizationInfo = IndexUtilizationInfo.createFromString(this.headers[Constants$1.HttpHeaders.IndexUtilization], true); + return writer.writeIndexMetrics(indexUtilizationInfo); } - return parts.join(" "); -} -/** - * @internal - */ -function getUserAgentHeaderName() { - return getHeaderName(); -} -/** - * @internal - */ -function getUserAgentValue(prefix) { - const runtimeInfo = new Map(); - runtimeInfo.set("core-rest-pipeline", SDK_VERSION); - setPlatformSpecificData(runtimeInfo); - const defaultAgent = getUserAgentString(runtimeInfo); - const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; - return userAgentValue; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const UserAgentHeaderName = getUserAgentHeaderName(); -/** - * The programmatic identifier of the userAgentPolicy. - */ -const userAgentPolicyName = "userAgentPolicy"; /** - * A policy that sets the User-Agent header (or equivalent) to reflect - * the library version. - * @param options - Options to customize the user agent value. + * @hidden */ -function userAgentPolicy(options = {}) { - const userAgentValue = getUserAgentValue(options.userAgentPrefix); - return { - name: userAgentPolicyName, - async sendRequest(request, next) { - if (!request.headers.has(UserAgentHeaderName)) { - request.headers.set(UserAgentHeaderName, userAgentValue); - } - return next(request); - }, - }; +const TimeoutErrorCode = "TimeoutError"; +class TimeoutError extends Error { + constructor(message = "Timeout Error") { + super(message); + this.code = TimeoutErrorCode; + this.name = TimeoutErrorCode; + } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * The programmatic identifier of the decompressResponsePolicy. - */ -const decompressResponsePolicyName = "decompressResponsePolicy"; -/** - * A policy to enable response decompression according to Accept-Encoding header - * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + * @hidden + * Utility function to get currentTime in UTC milliseconds. + * @returns */ -function decompressResponsePolicy() { - return { - name: decompressResponsePolicyName, - async sendRequest(request, next) { - // HEAD requests have no body - if (request.method !== "HEAD") { - request.headers.set("Accept-Encoding", "gzip,deflate"); - } - return next(request); - }, - }; +function getCurrentTimestampInMs() { + return Date.now(); } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const StandardAbortMessage = "The operation was aborted."; /** - * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. - * @param delayInMs - The number of milliseconds to be delayed. - * @param value - The value to be resolved with after a timeout of t milliseconds. 
- * @param options - The options for delay - currently abort options - * - abortSignal - The abortSignal associated with containing operation. - * - abortErrorMsg - The abort error message associated with containing operation. - * @returns Resolved promise + * @hidden + * Internal class to hold CosmosDiagnostic aggregate information all through the lifecycle of a request. + * This object gathers diagnostic information throughout Client operation which may span across multiple + * Server call, retries etc. + * Functions - recordFailedAttempt, recordMetaDataQuery, recordEndpointContactEvent are used to ingest + * data into the context. At the end of operation, getDiagnostics() is used to + * get final CosmosDiagnostic object. */ -function delay(delayInMs, value, options) { - return new Promise((resolve, reject) => { - let timer = undefined; - let onAborted = undefined; - const rejectOnAbort = () => { - return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); - }; - const removeListeners = () => { - if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { - options.abortSignal.removeEventListener("abort", onAborted); - } +class CosmosDiagnosticContext { + constructor() { + this.failedAttempts = []; + this.metadataLookups = []; + this.gaterwayStatistics = []; + this.locationEndpointsContacted = new Set(); + this.requestStartTimeUTCinMs = getCurrentTimestampInMs(); + } + recordFailedAttempt(gaterwayStatistics, retryAttemptNumber) { + const attempt = { + attemptNumber: retryAttemptNumber, + startTimeUTCInMs: gaterwayStatistics.startTimeUTCInMs, + durationInMs: gaterwayStatistics.durationInMs, + statusCode: gaterwayStatistics.statusCode, + substatusCode: gaterwayStatistics.subStatusCode, + requestPayloadLengthInBytes: gaterwayStatistics.requestPayloadLengthInBytes, + responsePayloadLengthInBytes: gaterwayStatistics.responsePayloadLengthInBytes, + activityId: gaterwayStatistics.activityId, + operationType: gaterwayStatistics.operationType, + resourceType: gaterwayStatistics.resourceType, }; - onAborted = () => { - if (timer) { - clearTimeout(timer); - } - removeListeners(); - return rejectOnAbort(); + this.failedAttempts.push(attempt); + } + recordNetworkCall(gaterwayStatistics) { + this.gaterwayStatistics.push(gaterwayStatistics); + } + /** + * Merge given DiagnosticContext to current node's DiagnosticContext, Treating GatewayRequests of + * given DiagnosticContext, as metadata requests. + */ + mergeDiagnostics(childDiagnostics, metadataType) { + // Copy Location endpoints contacted. + childDiagnostics.locationEndpointsContacted.forEach((endpoint) => this.locationEndpointsContacted.add(endpoint)); + // Copy child nodes's GatewayStatistics to parent's metadata lookups. + childDiagnostics.gaterwayStatistics.forEach((gateway) => this.metadataLookups.push({ + activityId: gateway.activityId, + requestPayloadLengthInBytes: gateway.requestPayloadLengthInBytes, + responsePayloadLengthInBytes: gateway.responsePayloadLengthInBytes, + startTimeUTCInMs: gateway.startTimeUTCInMs, + operationType: gateway.operationType, + resourceType: gateway.resourceType, + durationInMs: gateway.durationInMs, + metaDataType: metadataType, + })); + // Copy child nodes's metadata lookups to parent's metadata lookups. 
+ childDiagnostics.metadataLookups.forEach((lookup) => this.metadataLookups.push(lookup)); + // Copy child nodes's failed attempts to parent's failed attempts. + childDiagnostics.failedAttempts.forEach((lookup) => this.failedAttempts.push(lookup)); + } + getClientSideStats(endTimeUTCInMs = getCurrentTimestampInMs()) { + return { + requestStartTimeUTCInMs: this.requestStartTimeUTCinMs, + requestDurationInMs: endTimeUTCInMs - this.requestStartTimeUTCinMs, + totalRequestPayloadLengthInBytes: this.getTotalRequestPayloadLength(), + totalResponsePayloadLengthInBytes: this.getTotalResponsePayloadLength(), + locationEndpointsContacted: [...this.locationEndpointsContacted.values()], + metadataDiagnostics: { + metadataLookups: [...this.metadataLookups], + }, + retryDiagnostics: { + failedAttempts: [...this.failedAttempts], + }, + gatewayStatistics: this.gaterwayStatistics, }; - if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { - return rejectOnAbort(); - } - timer = setTimeout(() => { - removeListeners(); - resolve(value); - }, delayInMs); - if (options === null || options === void 0 ? void 0 : options.abortSignal) { - options.abortSignal.addEventListener("abort", onAborted); - } - }); -} -/** - * @internal - * @returns the parsed value or undefined if the parsed value is invalid. - */ -function parseHeaderValueAsNumber(response, headerName) { - const value = response.headers.get(headerName); - if (!value) - return; - const valueAsNum = Number(value); - if (Number.isNaN(valueAsNum)) - return; - return valueAsNum; + } + getTotalRequestPayloadLength() { + let totalRequestPayloadLength = 0; + this.gaterwayStatistics.forEach((req) => (totalRequestPayloadLength += req.requestPayloadLengthInBytes)); + this.metadataLookups.forEach((req) => (totalRequestPayloadLength += req.requestPayloadLengthInBytes)); + this.failedAttempts.forEach((req) => (totalRequestPayloadLength += req.requestPayloadLengthInBytes)); + return totalRequestPayloadLength; + } + getTotalResponsePayloadLength() { + let totalResponsePayloadLength = 0; + this.gaterwayStatistics.forEach((req) => (totalResponsePayloadLength += req.responsePayloadLengthInBytes)); + this.metadataLookups.forEach((req) => (totalResponsePayloadLength += req.responsePayloadLengthInBytes)); + this.failedAttempts.forEach((req) => (totalResponsePayloadLength += req.responsePayloadLengthInBytes)); + return totalResponsePayloadLength; + } + recordEndpointResolution(location) { + this.locationEndpointsContacted.add(location); + } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * The header that comes back from Azure services representing - * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). - */ -const RetryAfterHeader = "Retry-After"; -/** - * The headers that come back from Azure services representing - * the amount of time (minimum) to wait to retry. - * - * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds - * "Retry-After" : seconds or timestamp - */ -const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; -/** - * A response is a throttling retry response if it has a throttling status code (429 or 503), - * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. - * - * Returns the `retryAfterInMs` value if the response is a throttling retry response. - * If not throttling retry response, returns `undefined`. 
+ * * This is a Cosmos Diagnostic type that holds collected diagnostic information during a client operations. ie. Item.read(), Container.create(). + * It has three members - + * 1. `clientSideRequestStatistics` member contains aggregate diagnostic information, including - + * - metadata lookups. Here all the server requests, apart from the final intended resource are considered as metadata calls. + * i.e. for item.read(id), if the client makes server call to discover endpoints it would be considered as metadata call. + * - retries + * - endpoints contacted. + * - request, response payload stats. + * - gatewayStatistics - Information corresponding to main operation. For example during Item.read(), the client might perform many operations + * i.e. metadata lookup etc, but gatewayStatistics represents the diagnostics information for actual read operation. * - * @internal + * 2. diagnosticNode - Is a tree like structure which captures detailed diagnostic information. By default it is disabled, and is intended to be + * used only for debugging on non production environments. The kind of details captured in diagnosticNode is controlled by `CosmosDbDiagnosticLevel`. + * - CosmosDbDiagnosticLevel.info - Is default value. In this level only clientSideRequestStatistics are captured. Is is meant for production environments. + * - CosmosDbDiagnosticLevel.debug - Captures diagnosticNode and clientConfig. No request and response payloads are captured. Is not meant to be used + * in production environment. + * - CosmosDbDiagnosticLevel.debug-unsafe - In addition to data captured in CosmosDbDiagnosticLevel.debug, also captures request and response payloads. + * Is not meant to be used in production environment. + * 3. clientConfig - Captures information related to how client was configured during initialization. */ -function getRetryAfterInMs(response) { - if (!(response && [429, 503].includes(response.status))) - return undefined; - try { - // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After" - for (const header of AllRetryAfterHeaders) { - const retryAfterValue = parseHeaderValueAsNumber(response, header); - if (retryAfterValue === 0 || retryAfterValue) { - // "Retry-After" header ==> seconds - // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds - const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1; - return retryAfterValue * multiplyingFactor; // in milli-seconds - } - } - // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds - const retryAfterHeader = response.headers.get(RetryAfterHeader); - if (!retryAfterHeader) - return; - const date = Date.parse(retryAfterHeader); - const diff = date - Date.now(); - // negative diff would mean a date in the past, so retry asap with 0 milliseconds - return Number.isFinite(diff) ? Math.max(0, diff) : undefined; - } - catch (e) { - return undefined; +class CosmosDiagnostics { + /** + * @internal + */ + constructor(clientSideRequestStatistics, diagnosticNode, clientConfig) { + this.clientSideRequestStatistics = clientSideRequestStatistics; + this.diagnosticNode = diagnosticNode; + this.clientConfig = clientConfig; } } /** - * A response is a retry response if it has a throttling status code (429 or 503), - * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + * This is enum for Type of Metadata lookups possible. 
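+ * (Illustrative note, a sketch of how these values surface: each entry in
+ * diagnostics.clientSideRequestStatistics.metadataDiagnostics.metadataLookups carries a
+ * `metaDataType` set to one of the values below, e.g. "PARTITION_KEY_RANGE_LOOK_UP" when the
+ * client had to resolve partition key ranges before the main request.)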
*/ -function isThrottlingRetryResponse(response) { - return Number.isFinite(getRetryAfterInMs(response)); -} -function throttlingRetryStrategy() { - return { - name: "throttlingRetryStrategy", - retry({ response }) { - const retryAfterInMs = getRetryAfterInMs(response); - if (!Number.isFinite(retryAfterInMs)) { - return { skipStrategy: true }; - } - return { - retryAfterInMs, - }; - }, - }; +exports.MetadataLookUpType = void 0; +(function (MetadataLookUpType) { + MetadataLookUpType["PartitionKeyRangeLookUp"] = "PARTITION_KEY_RANGE_LOOK_UP"; + MetadataLookUpType["DatabaseAccountLookUp"] = "DATABASE_ACCOUNT_LOOK_UP"; + MetadataLookUpType["QueryPlanLookUp"] = "QUERY_PLAN_LOOK_UP"; + MetadataLookUpType["DatabaseLookUp"] = "DATABASE_LOOK_UP"; + MetadataLookUpType["ContainerLookUp"] = "CONTAINER_LOOK_UP"; +})(exports.MetadataLookUpType || (exports.MetadataLookUpType = {})); +function getRootNode(node) { + if (node.parent) + return getRootNode(node.parent); + else + return node; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -// intervals are in milliseconds -const DEFAULT_CLIENT_RETRY_INTERVAL = 1000; -const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64; -/** - * A retry strategy that retries with an exponentially increasing delay in these two cases: - * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). - * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). - */ -function exponentialRetryStrategy(options = {}) { - var _a, _b; - const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL; - const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? _b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - let retryAfterInMs = retryInterval; - return { - name: "exponentialRetryStrategy", - retry({ retryCount, response, responseError }) { - const matchedSystemError = isSystemError(responseError); - const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; - const isExponential = isExponentialRetryResponse(response); - const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; - const unknownResponse = response && (isThrottlingRetryResponse(response) || !isExponential); - if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { - return { skipStrategy: true }; - } - if (responseError && !matchedSystemError && !isExponential) { - return { errorToThrow: responseError }; - } - // Exponentially increase the delay each time - const exponentialDelay = retryAfterInMs * Math.pow(2, retryCount); - // Don't let the delay exceed the maximum - const clampedExponentialDelay = Math.min(maxRetryInterval, exponentialDelay); - // Allow the final value to have some "jitter" (within 50% of the delay size) so - // that retries across multiple clients don't occur simultaneously. - retryAfterInMs = - clampedExponentialDelay / 2 + coreUtil.getRandomIntegerInclusive(0, clampedExponentialDelay / 2); - return { retryAfterInMs }; - }, - }; -} -/** - * A response is a retry response if it has status codes: - * - 408, or - * - Greater or equal than 500, except for 501 and 505. 
- */ -function isExponentialRetryResponse(response) { - return Boolean(response && - response.status !== undefined && - (response.status >= 500 || response.status === 408) && - response.status !== 501 && - response.status !== 505); -} /** - * Determines whether an error from a pipeline response was triggered in the network layer. + * Cosmos DB Diagnostic Level */ -function isSystemError(err) { - if (!err) { - return false; - } - return (err.code === "ETIMEDOUT" || - err.code === "ESOCKETTIMEDOUT" || - err.code === "ECONNREFUSED" || - err.code === "ECONNRESET" || - err.code === "ENOENT" || - err.code === "ENOTFOUND"); -} +exports.CosmosDbDiagnosticLevel = void 0; +(function (CosmosDbDiagnosticLevel) { + CosmosDbDiagnosticLevel["info"] = "info"; + CosmosDbDiagnosticLevel["debug"] = "debug"; + CosmosDbDiagnosticLevel["debugUnsafe"] = "debug-unsafe"; +})(exports.CosmosDbDiagnosticLevel || (exports.CosmosDbDiagnosticLevel = {})); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const retryPolicyLogger = logger$1.createClientLogger("core-rest-pipeline retryPolicy"); /** - * The programmatic identifier of the retryPolicy. + * @hidden */ -const retryPolicyName = "retryPolicy"; +const CosmosDbDiagnosticLevelOrder = [ + exports.CosmosDbDiagnosticLevel.info, + exports.CosmosDbDiagnosticLevel.debug, + exports.CosmosDbDiagnosticLevel.debugUnsafe, +]; /** - * retryPolicy is a generic policy to enable retrying requests when certain conditions are met + * @hidden */ -function retryPolicy(strategies, options = { maxRetries: DEFAULT_RETRY_POLICY_COUNT }) { - const logger = options.logger || retryPolicyLogger; - return { - name: retryPolicyName, - async sendRequest(request, next) { - var _a, _b; - let response; - let responseError; - let retryCount = -1; - // eslint-disable-next-line no-constant-condition - retryRequest: while (true) { - retryCount += 1; - response = undefined; - responseError = undefined; - try { - logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); - response = await next(request); - logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId); - } - catch (e) { - logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId); - // RestErrors are valid targets for the retry strategies. - // If none of the retry strategies can work with them, they will be thrown later in this policy. - // If the received error is not a RestError, it is immediately thrown. - responseError = e; - if (!e || responseError.name !== "RestError") { - throw e; - } - response = responseError.response; - } - if ((_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - logger.error(`Retry ${retryCount}: Request aborted.`); - const abortError = new abortController.AbortError(); - throw abortError; - } - if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_POLICY_COUNT)) { - logger.info(`Retry ${retryCount}: Maximum retries reached. 
Returning the last received response, or throwing the last received error.`); - if (responseError) { - throw responseError; - } - else if (response) { - return response; - } - else { - throw new Error("Maximum retries reached with no response or error to throw"); - } - } - logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); - strategiesLoop: for (const strategy of strategies) { - const strategyLogger = strategy.logger || retryPolicyLogger; - strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); - const modifiers = strategy.retry({ - retryCount, - response, - responseError, - }); - if (modifiers.skipStrategy) { - strategyLogger.info(`Retry ${retryCount}: Skipped.`); - continue strategiesLoop; - } - const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; - if (errorToThrow) { - strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); - throw errorToThrow; - } - if (retryAfterInMs || retryAfterInMs === 0) { - strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); - await delay(retryAfterInMs, undefined, { abortSignal: request.abortSignal }); - continue retryRequest; - } - if (redirectTo) { - strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); - request.url = redirectTo; - continue retryRequest; - } - } - if (responseError) { - logger.info(`None of the retry strategies could work with the received error. Throwing it.`); - throw responseError; - } - if (response) { - logger.info(`None of the retry strategies could work with the received response. Returning it.`); - return response; - } - // If all the retries skip and there's no response, - // we're still in the retry loop, so a new request will be sent - // until `maxRetries` is reached. - } - }, - }; +function allowTracing(levelToCheck, clientDiagnosticLevel) { + const indexOfDiagnosticLevelToCheck = CosmosDbDiagnosticLevelOrder.indexOf(levelToCheck); + const indexOfClientDiagnosticLevel = CosmosDbDiagnosticLevelOrder.indexOf(clientDiagnosticLevel); + if (indexOfDiagnosticLevelToCheck === -1 || indexOfClientDiagnosticLevel === -1) { + return false; + } + return indexOfDiagnosticLevelToCheck <= indexOfClientDiagnosticLevel; } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * Name of the {@link defaultRetryPolicy} - */ -const defaultRetryPolicyName = "defaultRetryPolicy"; -/** - * A policy that retries according to three strategies: - * - When the server sends a 429 response with a Retry-After header. - * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). - * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. + * @hidden + * This is Internal Representation for DiagnosticNode. It contains useful helper functions to collect + * diagnostic information throughout the lifetime of Diagnostic session. + * The functions toDiagnosticNode() & toDiagnostic() are given to convert it to public facing counterpart. */ -function defaultRetryPolicy(options = {}) { - var _a; - return { - name: defaultRetryPolicyName, - sendRequest: retryPolicy([throttlingRetryStrategy(), exponentialRetryStrategy(options)], { - maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, - }).sendRequest, - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
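+// Illustrative note (sketch): allowTracing(levelToCheck, clientLevel) above gates what gets captured,
+// using the order info < debug < debug-unsafe from CosmosDbDiagnosticLevelOrder. For example:
+//   allowTracing(exports.CosmosDbDiagnosticLevel.info, exports.CosmosDbDiagnosticLevel.debug)   // true  -> captured
+//   allowTracing(exports.CosmosDbDiagnosticLevel.debug, exports.CosmosDbDiagnosticLevel.info)   // false -> skipped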
-function normalizeName(name) { - return name.toLowerCase(); -} -function* headerIterator(map) { - for (const entry of map.values()) { - yield [entry.name, entry.value]; +class DiagnosticNodeInternal { + /** + * @internal + */ + constructor(diagnosticLevel, type, parent, data = {}, startTimeUTCInMs = getCurrentTimestampInMs(), ctx = new CosmosDiagnosticContext()) { + this.id = uuid$3.v4(); + this.nodeType = type; + this.startTimeUTCInMs = startTimeUTCInMs; + this.data = data; + this.children = []; + this.durationInMs = 0; + this.parent = parent; + this.diagnosticCtx = ctx; + this.diagnosticLevel = diagnosticLevel; } -} -class HttpHeadersImpl { - constructor(rawHeaders) { - this._headersMap = new Map(); - if (rawHeaders) { - for (const headerName of Object.keys(rawHeaders)) { - this.set(headerName, rawHeaders[headerName]); - } + /** + * @internal + */ + addLog(msg) { + if (!this.data.log) { + this.data.log = []; } + this.data.log.push(msg); } /** - * Set a header in this collection with the provided name and value. The name is - * case-insensitive. - * @param name - The name of the header to set. This value is case-insensitive. - * @param value - The value of the header to set. + * @internal */ - set(name, value) { - this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); + sanitizeHeaders(headers) { + return headers; } /** - * Get the header value for the provided header name, or undefined if no header exists in this - * collection with the provided name. - * @param name - The name of the header. This value is case-insensitive. + * Updated durationInMs for node, based on endTimeUTCInMs provided. + * @internal */ - get(name) { - var _a; - return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value; + updateTimestamp(endTimeUTCInMs = getCurrentTimestampInMs()) { + this.durationInMs = endTimeUTCInMs - this.startTimeUTCInMs; } /** - * Get whether or not this header collection contains a header entry for the provided header name. - * @param name - The name of the header to set. This value is case-insensitive. 
+ * @internal */ - has(name) { - return this._headersMap.has(normalizeName(name)); + recordSuccessfulNetworkCall(startTimeUTCInMs, requestContext, pipelineResponse, substatus, url) { + const responseHeaders = pipelineResponse.headers.toJSON(); + const gatewayRequest = { + activityId: responseHeaders[Constants$1.HttpHeaders.ActivityId], + startTimeUTCInMs, + durationInMs: getCurrentTimestampInMs() - startTimeUTCInMs, + statusCode: pipelineResponse.status, + subStatusCode: substatus, + requestPayloadLengthInBytes: calculateRequestPayloadLength(requestContext), + responsePayloadLengthInBytes: calculateResponsePayloadLength(pipelineResponse), + operationType: requestContext.operationType, + resourceType: requestContext.resourceType, + partitionKeyRangeId: requestContext.partitionKeyRangeId, + }; + let requestData = { + OperationType: gatewayRequest.operationType, + resourceType: gatewayRequest.resourceType, + requestPayloadLengthInBytes: gatewayRequest.requestPayloadLengthInBytes, + }; + if (allowTracing(exports.CosmosDbDiagnosticLevel.debugUnsafe, this.diagnosticLevel)) { + requestData = Object.assign(Object.assign({}, requestData), { headers: this.sanitizeHeaders(requestContext.headers), requestBody: requestContext.body, responseBody: pipelineResponse.bodyAsText, url: url }); + } + this.addData({ + requestPayloadLengthInBytes: gatewayRequest.requestPayloadLengthInBytes, + responsePayloadLengthInBytes: gatewayRequest.responsePayloadLengthInBytes, + startTimeUTCInMs: gatewayRequest.startTimeUTCInMs, + durationInMs: gatewayRequest.durationInMs, + requestData, + }); + this.diagnosticCtx.recordNetworkCall(gatewayRequest); } /** - * Remove the header with the provided headerName. - * @param name - The name of the header to remove. + * @internal */ - delete(name) { - this._headersMap.delete(normalizeName(name)); + recordFailedNetworkCall(startTimeUTCInMs, requestContext, retryAttemptNumber, statusCode, substatusCode, responseHeaders) { + this.addData({ failedAttempty: true }); + const requestPayloadLengthInBytes = calculateRequestPayloadLength(requestContext); + this.diagnosticCtx.recordFailedAttempt({ + activityId: responseHeaders[Constants$1.HttpHeaders.ActivityId], + startTimeUTCInMs, + durationInMs: getCurrentTimestampInMs() - startTimeUTCInMs, + statusCode, + subStatusCode: substatusCode, + requestPayloadLengthInBytes, + responsePayloadLengthInBytes: 0, + operationType: requestContext.operationType, + resourceType: requestContext.resourceType, + }, retryAttemptNumber); + let requestData = { + OperationType: requestContext.operationType, + resourceType: requestContext.resourceType, + requestPayloadLengthInBytes, + }; + if (allowTracing(exports.CosmosDbDiagnosticLevel.debugUnsafe, this.diagnosticLevel)) { + requestData = Object.assign(Object.assign({}, requestData), { headers: this.sanitizeHeaders(requestContext.headers), requestBody: requestContext.body, url: prepareURL(requestContext.endpoint, requestContext.path) }); + } + this.addData({ + failedAttempty: true, + requestData, + }); } /** - * Get the JSON object representation of this HTTP header collection. 
+ * @internal */ - toJSON(options = {}) { - const result = {}; - if (options.preserveCase) { - for (const entry of this._headersMap.values()) { - result[entry.name] = entry.value; - } - } - else { - for (const [normalizedName, entry] of this._headersMap) { - result[normalizedName] = entry.value; + recordEndpointResolution(location) { + this.addData({ selectedLocation: location }); + this.diagnosticCtx.recordEndpointResolution(location); + } + /** + * @internal + */ + addData(data, msg, level = this.diagnosticLevel) { + if (level !== exports.CosmosDbDiagnosticLevel.info) { + this.data = Object.assign(Object.assign({}, this.data), data); + if (msg) { + this.addLog(msg); } } - return result; } /** - * Get the string representation of this HTTP header collection. + * Merge given DiagnosticNodeInternal's context to current node's DiagnosticContext, Treating GatewayRequests of + * given DiagnosticContext, as metadata requests. Given DiagnosticNodeInternal becomes a child of this node. + * @internal */ - toString() { - return JSON.stringify(this.toJSON({ preserveCase: true })); + addChildNode(child, level, metadataType) { + this.diagnosticCtx.mergeDiagnostics(child.diagnosticCtx, metadataType); + if (allowTracing(level, this.diagnosticLevel)) { + child.parent = this; + this.children.push(child); + } + return child; } /** - * Iterate over tuples of header [name, value] pairs. + * @internal */ - [Symbol.iterator]() { - return headerIterator(this._headersMap); - } -} -/** - * Creates an object that satisfies the `HttpHeaders` interface. - * @param rawHeaders - A simple object representing initial headers - */ -function createHttpHeaders(rawHeaders) { - return new HttpHeadersImpl(rawHeaders); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The programmatic identifier of the formDataPolicy. - */ -const formDataPolicyName = "formDataPolicy"; -/** - * A policy that encodes FormData on the request into the body. - */ -function formDataPolicy() { - return { - name: formDataPolicyName, - async sendRequest(request, next) { - if (request.formData) { - const contentType = request.headers.get("Content-Type"); - if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { - request.body = wwwFormUrlEncode(request.formData); - } - else { - await prepareFormData(request.formData, request); - } - request.formData = undefined; - } - return next(request); - }, - }; -} -function wwwFormUrlEncode(formData) { - const urlSearchParams = new URLSearchParams(); - for (const [key, value] of Object.entries(formData)) { - if (Array.isArray(value)) { - for (const subValue of value) { - urlSearchParams.append(key, subValue.toString()); - } + initializeChildNode(type, level, data = {}) { + if (allowTracing(level, this.diagnosticLevel)) { + const child = new DiagnosticNodeInternal(this.diagnosticLevel, type, this, data, getCurrentTimestampInMs(), this.diagnosticCtx); + this.children.push(child); + return child; } else { - urlSearchParams.append(key, value.toString()); + return this; } } - return urlSearchParams.toString(); -} -async function prepareFormData(formData, request) { - // validate content type (multipart/form-data) - const contentType = request.headers.get("Content-Type"); - if (contentType && !contentType.startsWith("multipart/form-data")) { - // content type is specified and is not multipart/form-data. Exit. - return; - } - request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? 
contentType : "multipart/form-data"); - // set body to MultipartRequestBody using content from FormDataMap - const parts = []; - for (const [fieldName, values] of Object.entries(formData)) { - for (const value of Array.isArray(values) ? values : [values]) { - if (typeof value === "string") { - parts.push({ - headers: createHttpHeaders({ - "Content-Disposition": `form-data; name="${fieldName}"`, - }), - body: coreUtil.stringToUint8Array(value, "utf-8"), - }); - } - else { - // using || instead of ?? here since if value.name is empty we should create a file name - const fileName = value.name || "blob"; - const headers = createHttpHeaders(); - headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); - if (value.type) { - headers.set("Content-Type", value.type); - } - parts.push({ - headers, - body: value, - }); + /** + * @internal + */ + recordQueryResult(resources, level) { + var _a; + if (allowTracing(level, this.diagnosticLevel)) { + const previousCount = (_a = this.data.queryRecordsRead) !== null && _a !== void 0 ? _a : 0; + if (Array.isArray(resources)) { + this.data.queryRecordsRead = previousCount + resources.length; } } - request.multipartBody = { parts }; + } + /** + * Convert DiagnosticNodeInternal (internal representation) to DiagnosticNode (public, sanitized representation) + * @internal + */ + toDiagnosticNode() { + return { + id: this.id, + nodeType: this.nodeType, + children: this.children.map((child) => child.toDiagnosticNode()), + data: this.data, + startTimeUTCInMs: this.startTimeUTCInMs, + durationInMs: this.durationInMs, + }; + } + /** + * Convert to CosmosDiagnostics + * @internal + */ + toDiagnostic(clientConfigDiagnostic) { + const rootNode = getRootNode(this); + const diagnostiNode = allowTracing(exports.CosmosDbDiagnosticLevel.debug, this.diagnosticLevel) + ? rootNode.toDiagnosticNode() + : undefined; + const clientConfig = allowTracing(exports.CosmosDbDiagnosticLevel.debug, this.diagnosticLevel) + ? clientConfigDiagnostic + : undefined; + const cosmosDiagnostic = new CosmosDiagnostics(this.diagnosticCtx.getClientSideStats(), diagnostiNode, clientConfig); + return cosmosDiagnostic; } } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const HTTPS_PROXY = "HTTPS_PROXY"; -const HTTP_PROXY = "HTTP_PROXY"; -const ALL_PROXY = "ALL_PROXY"; -const NO_PROXY = "NO_PROXY"; /** - * The programmatic identifier of the proxyPolicy. + * @hidden */ -const proxyPolicyName = "proxyPolicy"; +exports.DiagnosticNodeType = void 0; +(function (DiagnosticNodeType) { + DiagnosticNodeType["CLIENT_REQUEST_NODE"] = "CLIENT_REQUEST_NODE"; + DiagnosticNodeType["METADATA_REQUEST_NODE"] = "METADATA_REQUEST_NODE"; + DiagnosticNodeType["HTTP_REQUEST"] = "HTTP_REQUEST"; + DiagnosticNodeType["BATCH_REQUEST"] = "BATCH_REQUEST"; + DiagnosticNodeType["PARALLEL_QUERY_NODE"] = "PARALLEL_QUERY_NODE"; + DiagnosticNodeType["DEFAULT_QUERY_NODE"] = "DEFAULT_QUERY_NODE"; + DiagnosticNodeType["QUERY_REPAIR_NODE"] = "QUERY_REPAIR_NODE"; + DiagnosticNodeType["BACKGROUND_REFRESH_THREAD"] = "BACKGROUND_REFRESH_THREAD"; + DiagnosticNodeType["REQUEST_ATTEMPTS"] = "REQUEST_ATTEMPTS"; +})(exports.DiagnosticNodeType || (exports.DiagnosticNodeType = {})); +function calculateResponsePayloadLength(response) { + var _a; + return ((_a = response === null || response === void 0 ? void 0 : response.bodyAsText) === null || _a === void 0 ? void 0 : _a.length) || 0; +} +function calculateRequestPayloadLength(requestContext) { + return requestContext.body ? 
requestContext.body.length : 0; +} + +// Copyright (c) Microsoft Corporation. /** - * Stores the patterns specified in NO_PROXY environment variable. - * @internal + * @hidden + * Utility function to create an Empty CosmosDiagnostic object. */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. */ -const globalBypassedMap = new Map(); -function getEnvironmentValue(name) { - if (process.env[name]) { - return process.env[name]; - } - else if (process.env[name.toLowerCase()]) { - return process.env[name.toLowerCase()]; - } - return undefined; -} -function loadEnvironmentProxyValue() { - if (!process) { - return undefined; - } - const httpsProxy = getEnvironmentValue(HTTPS_PROXY); - const allProxy = getEnvironmentValue(ALL_PROXY); - const httpProxy = getEnvironmentValue(HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; +function getEmptyCosmosDiagnostics() { + return new CosmosDiagnostics({ + requestDurationInMs: 0, + requestStartTimeUTCInMs: getCurrentTimestampInMs(), + totalRequestPayloadLengthInBytes: 0, + totalResponsePayloadLengthInBytes: 0, + locationEndpointsContacted: [], + retryDiagnostics: { + failedAttempts: [], + }, + metadataDiagnostics: { + metadataLookups: [], + }, + gatewayStatistics: [], + }, { + id: uuid$3.v4(), + nodeType: exports.DiagnosticNodeType.CLIENT_REQUEST_NODE, + children: [], + data: {}, + startTimeUTCInMs: getCurrentTimestampInMs(), + durationInMs: 0, + }); } /** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + * A supporting utility wrapper function, to be used inside a diagnostic session started + * by `withDiagnostics` function. + * Created a Diagnostic node and add it as a child to existing diagnostic session. + * @hidden */ -function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = new URL(uri).hostname; - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - // This should match either domain it self or any subdomain or host - // .foo.com will match foo.com it self or *.foo.com - if (host.endsWith(pattern)) { - isBypassedFlag = true; - } - else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; - } - } - } - else { - if (host === pattern) { - isBypassedFlag = true; - } - } +async function addDignosticChild(callback, node, type, data = {}) { + const childNode = node.initializeChildNode(type, exports.CosmosDbDiagnosticLevel.debug, data); + try { + const response = await callback(childNode); + childNode.updateTimestamp(); + return response; } - bypassedMap === null || bypassedMap === void 0 ? 
void 0 : bypassedMap.set(host, isBypassedFlag); - return isBypassedFlag; -} -function loadNoProxy() { - const noProxy = getEnvironmentValue(NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); + catch (e) { + childNode.addData({ + failure: true, + }); + childNode.updateTimestamp(); + throw e; } - return []; } /** - * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. - * If no argument is given, it attempts to parse a proxy URL from the environment - * variables `HTTPS_PROXY` or `HTTP_PROXY`. - * @param proxyUrl - The url of the proxy to use. May contain authentication information. + * A supporting utility wrapper function, to be used inside a diagnostic session started + * by `withDiagnostics` function. + * Treats requests originating in provided `callback` as metadata calls. + * To realize this, starts a temporary diagnostic session, after execution of callback is + * finished. Merges this temporary diagnostic session to the original diagnostic session + * represented by the input parameter `node`. + * @hidden */ -function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return undefined; - } +async function withMetadataDiagnostics(callback, node, type) { + const diagnosticNodeForMetadataCall = new DiagnosticNodeInternal(node.diagnosticLevel, exports.DiagnosticNodeType.METADATA_REQUEST_NODE, null); + try { + const response = await callback(diagnosticNodeForMetadataCall); + node.addChildNode(diagnosticNodeForMetadataCall, exports.CosmosDbDiagnosticLevel.debug, type); + return response; + } + catch (e) { + node.addChildNode(diagnosticNodeForMetadataCall, exports.CosmosDbDiagnosticLevel.debug, type); + throw e; } - const parsedUrl = new URL(proxyUrl); - const schema = parsedUrl.protocol ? parsedUrl.protocol + "//" : ""; - return { - host: schema + parsedUrl.hostname, - port: Number.parseInt(parsedUrl.port || "80"), - username: parsedUrl.username, - password: parsedUrl.password, - }; } /** - * @internal + * Utility wrapper function to managed lifecycle of a Diagnostic session. + * Meant to be used at the root of the client operation. i.e. item.read(), + * queryIterator.fetchAll(). + * + * This utility starts a new diagnostic session. So using it any where else + * other than start of operation, will result is different diagnostic sessions. + * + * Workings : + * 1. Takes a callback function as input. + * 2. Creates a new instance of DiagnosticNodeInternal, which can be though as starting + * a new diagnostic session. + * 3. Executes the callback function. + * 4. If execution was successful. Converts DiagnosticNodeInternal to CosmosDiagnostics + * and injects it to the response object and returns this object. + * 5. If execution threw an exception. Sill converts DiagnosticNodeInternal to CosmosDiagnostics + * and injects it to the Error object, and rethrows the Error object. 
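+ *
+ * Illustrative sketch (assuming a configured `container` from the public client surface): the injected
+ * diagnostics can then be read off the returned response object, e.g.
+ *
+ *   const { resource, diagnostics } = await container.item("itemId", "partitionKeyValue").read();
+ *   console.log(diagnostics.clientSideRequestStatistics.requestDurationInMs);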
+ * + * @hidden */ -function getProxyAgentOptions(proxySettings, { headers, tlsSettings }) { - let parsedProxyUrl; +async function withDiagnostics(callback, clientContext, type = exports.DiagnosticNodeType.CLIENT_REQUEST_NODE) { + const diagnosticNode = new DiagnosticNodeInternal(clientContext.diagnosticLevel, type, null); try { - parsedProxyUrl = new URL(proxySettings.host); - } - catch (_error) { - throw new Error(`Expecting a valid host string in proxy settings, but found "${proxySettings.host}".`); + const response = await callback(diagnosticNode); + diagnosticNode.updateTimestamp(); + const diagnostics = diagnosticNode.toDiagnostic(clientContext.getClientConfig()); + if (typeof response === "object" && response !== null) { + response.diagnostics = diagnostics; + } + clientContext.recordDiagnostics(diagnostics); + return response; } - if (tlsSettings) { - logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); + catch (e) { + diagnosticNode.updateTimestamp(); + diagnosticNode.addData({ + failure: true, + }); + const diagnostics = diagnosticNode.toDiagnostic(clientContext.getClientConfig()); + e.diagnostics = diagnostics; + clientContext.recordDiagnostics(diagnostics); + throw e; } - const proxyAgentOptions = { - hostname: parsedProxyUrl.hostname, - port: proxySettings.port, - protocol: parsedProxyUrl.protocol, - headers: headers.toJSON(), - }; - if (proxySettings.username && proxySettings.password) { - proxyAgentOptions.auth = `${proxySettings.username}:${proxySettings.password}`; +} + +// Copyright (c) Microsoft Corporation. +const logger$3 = logger$5.createClientLogger("ClientContext"); +/** @hidden */ +var STATES; +(function (STATES) { + STATES["start"] = "start"; + STATES["inProgress"] = "inProgress"; + STATES["ended"] = "ended"; +})(STATES || (STATES = {})); +/** @hidden */ +class DefaultQueryExecutionContext { + get continuation() { + return this.continuationToken; } - else if (proxySettings.username) { - proxyAgentOptions.auth = `${proxySettings.username}`; + /** + * Provides the basic Query Execution Context. + * This wraps the internal logic query execution using provided fetch functions + * + * @param clientContext - Is used to read the partitionKeyRanges for split proofing + * @param query - A SQL query. + * @param options - Represents the feed options. + * @param fetchFunctions - A function to retrieve each page of data. + * An array of functions may be used to query more than one partition. + * @hidden + */ + constructor(options, fetchFunctions) { + this.resources = []; + this.currentIndex = 0; + this.currentPartitionIndex = 0; + this.fetchFunctions = Array.isArray(fetchFunctions) ? fetchFunctions : [fetchFunctions]; + this.options = options || {}; + this.continuationToken = this.options.continuationToken || this.options.continuation || null; + this.state = DefaultQueryExecutionContext.STATES.start; } - return proxyAgentOptions; -} -function setProxyAgentOnRequest(request, cachedAgents) { - // Custom Agent should take precedence so if one is present - // we should skip to avoid overwriting it. - if (request.agent) { - return; + /** + * Execute a provided callback on the next element in the execution context. 
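+ *
+ * Illustrative sketch of internal iteration (assuming an execution context `ctx` and a diagnostic node):
+ *
+ *   while (ctx.hasMoreResults()) {
+ *     const { result, headers } = await ctx.nextItem(diagnosticNode);
+ *     if (result === undefined) break; // feed drained
+ *   }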
+ */ + async nextItem(diagnosticNode) { + ++this.currentIndex; + const response = await this.current(diagnosticNode); + return response; } - const url = new URL(request.url); - const isInsecure = url.protocol !== "https:"; - const proxySettings = request.proxySettings; - if (proxySettings) { - if (isInsecure) { - if (!cachedAgents.httpProxyAgent) { - const proxyAgentOptions = getProxyAgentOptions(proxySettings, request); - cachedAgents.httpProxyAgent = new httpProxyAgent.HttpProxyAgent(proxyAgentOptions); + /** + * Retrieve the current element on the execution context. + */ + async current(diagnosticNode) { + if (this.currentIndex < this.resources.length) { + return { + result: this.resources[this.currentIndex], + headers: getInitialHeader(), + }; + } + if (this._canFetchMore()) { + const { result: resources, headers } = await this.fetchMore(diagnosticNode); + this.resources = resources; + if (this.resources.length === 0) { + if (!this.continuationToken && this.currentPartitionIndex >= this.fetchFunctions.length) { + this.state = DefaultQueryExecutionContext.STATES.ended; + return { result: undefined, headers }; + } + else { + return this.current(diagnosticNode); + } } - request.agent = cachedAgents.httpProxyAgent; + return { result: this.resources[this.currentIndex], headers }; } else { - if (!cachedAgents.httpsProxyAgent) { - const proxyAgentOptions = getProxyAgentOptions(proxySettings, request); - cachedAgents.httpsProxyAgent = new httpsProxyAgent.HttpsProxyAgent(proxyAgentOptions); - } - request.agent = cachedAgents.httpsProxyAgent; + this.state = DefaultQueryExecutionContext.STATES.ended; + return { + result: undefined, + headers: getInitialHeader(), + }; } } -} -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings = getDefaultProxySettings(), options) { - if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); + /** + * Determine if there are still remaining resources to processs based on + * the value of the continuation token or the elements remaining on the current batch in the execution context. + * + * @returns true if there is other elements to process in the DefaultQueryExecutionContext. + */ + hasMoreResults() { + return (this.state === DefaultQueryExecutionContext.STATES.start || + this.continuationToken !== undefined || + this.currentIndex < this.resources.length - 1 || + this.currentPartitionIndex < this.fetchFunctions.length); } - const cachedAgents = {}; - return { - name: proxyPolicyName, - async sendRequest(request, next) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? 
undefined : globalBypassedMap)) { - request.proxySettings = proxySettings; + /** + * Fetches the next batch of the feed and pass them as an array to a callback + */ + async fetchMore(diagnosticNode) { + return addDignosticChild(async (childDiagnosticNode) => { + if (this.currentPartitionIndex >= this.fetchFunctions.length) { + return { + headers: getInitialHeader(), + result: undefined, + }; } - if (request.proxySettings) { - setProxyAgentOnRequest(request, cachedAgents); + // Keep to the original continuation and to restore the value after fetchFunction call + const originalContinuation = this.options.continuationToken || this.options.continuation; + this.options.continuationToken = this.continuationToken; + // Return undefined if there is no more results + if (this.currentPartitionIndex >= this.fetchFunctions.length) { + return { + headers: getInitialHeader(), + result: undefined, + }; } - return next(request); - }, - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The programmatic identifier of the setClientRequestIdPolicy. - */ -const setClientRequestIdPolicyName = "setClientRequestIdPolicy"; -/** - * Each PipelineRequest gets a unique id upon creation. - * This policy passes that unique id along via an HTTP header to enable better - * telemetry and tracing. - * @param requestIdHeaderName - The name of the header to pass the request ID to. - */ -function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { - return { - name: setClientRequestIdPolicyName, - async sendRequest(request, next) { - if (!request.headers.has(requestIdHeaderName)) { - request.headers.set(requestIdHeaderName, request.requestId); + let resources; + let responseHeaders; + try { + let p; + if (this.nextFetchFunction !== undefined) { + logger$3.verbose("using prefetch"); + p = this.nextFetchFunction; + this.nextFetchFunction = undefined; + } + else { + logger$3.verbose("using fresh fetch"); + p = this.fetchFunctions[this.currentPartitionIndex](childDiagnosticNode, this.options); + } + const response = await p; + resources = response.result; + childDiagnosticNode.recordQueryResult(resources, exports.CosmosDbDiagnosticLevel.debugUnsafe); + responseHeaders = response.headers; + this.continuationToken = responseHeaders[Constants$1.HttpHeaders.Continuation]; + if (!this.continuationToken) { + ++this.currentPartitionIndex; + } + if (this.options && this.options.bufferItems === true) { + const fetchFunction = this.fetchFunctions[this.currentPartitionIndex]; + this.nextFetchFunction = fetchFunction + ? fetchFunction(childDiagnosticNode, Object.assign(Object.assign({}, this.options), { continuationToken: this.continuationToken })) + : undefined; + } } - return next(request); - }, - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Name of the TLS Policy - */ -const tlsPolicyName = "tlsPolicy"; -/** - * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. 
- */ -function tlsPolicy(tlsSettings) { - return { - name: tlsPolicyName, - sendRequest: async (req, next) => { - // Users may define a request tlsSettings, honor those over the client level one - if (!req.tlsSettings) { - req.tlsSettings = tlsSettings; + catch (err) { + this.state = DefaultQueryExecutionContext.STATES.ended; + // return callback(err, undefined, responseHeaders); + // TODO: Error and data being returned is an antipattern, this might broken + throw err; } - return next(req); - }, - }; + this.state = DefaultQueryExecutionContext.STATES.inProgress; + this.currentIndex = 0; + this.options.continuationToken = originalContinuation; + this.options.continuation = originalContinuation; + // deserializing query metrics so that we aren't working with delimited strings in the rest of the code base + if (Constants$1.HttpHeaders.QueryMetrics in responseHeaders) { + const delimitedString = responseHeaders[Constants$1.HttpHeaders.QueryMetrics]; + let queryMetrics = QueryMetrics.createFromDelimitedString(delimitedString); + // Add the request charge to the query metrics so that we can have per partition request charge. + if (Constants$1.HttpHeaders.RequestCharge in responseHeaders) { + const requestCharge = Number(responseHeaders[Constants$1.HttpHeaders.RequestCharge]) || 0; + queryMetrics = new QueryMetrics(queryMetrics.retrievedDocumentCount, queryMetrics.retrievedDocumentSize, queryMetrics.outputDocumentCount, queryMetrics.outputDocumentSize, queryMetrics.indexHitDocumentCount, queryMetrics.totalQueryExecutionTime, queryMetrics.queryPreparationTimes, queryMetrics.indexLookupTime, queryMetrics.documentLoadTime, queryMetrics.vmExecutionTime, queryMetrics.runtimeExecutionTimes, queryMetrics.documentWriteTime, new ClientSideMetrics(requestCharge)); + } + // Wraping query metrics in a object where the key is '0' just so single partition + // and partition queries have the same response schema + responseHeaders[Constants$1.HttpHeaders.QueryMetrics] = {}; + responseHeaders[Constants$1.HttpHeaders.QueryMetrics]["0"] = queryMetrics; + } + return { result: resources, headers: responseHeaders }; + }, diagnosticNode, exports.DiagnosticNodeType.DEFAULT_QUERY_NODE, { + queryMethodIdentifier: "fetchMore", + }); + } + _canFetchMore() { + const res = this.state === DefaultQueryExecutionContext.STATES.start || + (this.continuationToken && this.state === DefaultQueryExecutionContext.STATES.inProgress) || + (this.currentPartitionIndex < this.fetchFunctions.length && + this.state === DefaultQueryExecutionContext.STATES.inProgress); + return res; + } } +DefaultQueryExecutionContext.STATES = STATES; -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const custom = util.inspect.custom; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const errorSanitizer = new Sanitizer(); -/** - * A custom error type for failed pipeline requests. - */ -class RestError extends Error { - constructor(message, options = {}) { - super(message); - this.name = "RestError"; - this.code = options.code; - this.statusCode = options.statusCode; - this.request = options.request; - this.response = options.response; - Object.setPrototypeOf(this, RestError.prototype); +/** @hidden */ +class AverageAggregator { + /** + * Add the provided item to aggregation result. 
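+ * Illustrative sketch: each `other` is a partial { sum, count } pair (e.g. one per partition), so
+ * aggregate({ sum: 10, count: 2 }); aggregate({ sum: 5, count: 1 }); getResult() === 5.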
+ */ + aggregate(other) { + if (other == null || other.sum == null) { + return; + } + if (this.sum == null) { + this.sum = 0.0; + this.count = 0; + } + this.sum += other.sum; + this.count += other.count; } /** - * Logging method for util.inspect in Node + * Get the aggregation result. */ - [custom]() { - return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + getResult() { + if (this.sum == null || this.count <= 0) { + return undefined; + } + return this.sum / this.count; } } -/** - * Something went wrong when making the request. - * This means the actual request failed for some reason, - * such as a DNS issue or the connection being lost. - */ -RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; -/** - * This means that parsing the response from the server failed. - * It may have been malformed. - */ -RestError.PARSE_ERROR = "PARSE_ERROR"; -/** - * Typeguard for RestError - * @param e - Something caught by a catch clause. - */ -function isRestError(e) { - if (e instanceof RestError) { - return true; + +/** @hidden */ +class CountAggregator { + /** + * Represents an aggregator for COUNT operator. + * @hidden + */ + constructor() { + this.value = 0; + } + /** + * Add the provided item to aggregation result. + */ + aggregate(other) { + this.value += other; + } + /** + * Get the aggregation result. + */ + getResult() { + return this.value; } - return coreUtil.isError(e) && e.name === "RestError"; } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The programmatic identifier of the tracingPolicy. - */ -const tracingPolicyName = "tracingPolicy"; -/** - * A simple policy to create OpenTelemetry Spans for each request made by the pipeline - * that has SpanOptions with a parent. - * Requests made without a parent Span will not be recorded. - * @param options - Options to configure the telemetry logged by the tracing policy. - */ -function tracingPolicy(options = {}) { - const userAgent = getUserAgentValue(options.userAgentPrefix); - const tracingClient = tryCreateTracingClient(); - return { - name: tracingPolicyName, - async sendRequest(request, next) { - var _a, _b; - if (!tracingClient || !((_a = request.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext)) { - return next(request); - } - const { span, tracingContext } = (_b = tryCreateSpan(tracingClient, request, userAgent)) !== null && _b !== void 0 ? _b : {}; - if (!span || !tracingContext) { - return next(request); - } - try { - const response = await tracingClient.withContext(tracingContext, next, request); - tryProcessResponse(span, response); - return response; - } - catch (err) { - tryProcessError(span, err); - throw err; - } +// TODO: this smells funny +/** @hidden */ +const TYPEORDCOMPARATOR = Object.freeze({ + NoValue: { + ord: 0, + }, + undefined: { + ord: 1, + }, + boolean: { + ord: 2, + compFunc: (a, b) => { + return a === b ? 0 : a > b ? 1 : -1; }, - }; -} -function tryCreateTracingClient() { - try { - return coreTracing.createTracingClient({ - namespace: "", - packageName: "@azure/core-rest-pipeline", - packageVersion: SDK_VERSION, - }); + }, + number: { + ord: 4, + compFunc: (a, b) => { + return a === b ? 0 : a > b ? 1 : -1; + }, + }, + string: { + ord: 5, + compFunc: (a, b) => { + return a === b ? 0 : a > b ? 
1 : -1; + }, + }, +}); +/** @hidden */ +class OrderByDocumentProducerComparator { + constructor(sortOrder) { + this.sortOrder = sortOrder; + } // TODO: This should be an enum + targetPartitionKeyRangeDocProdComparator(docProd1, docProd2) { + const a = docProd1.getTargetParitionKeyRange()["minInclusive"]; + const b = docProd2.getTargetParitionKeyRange()["minInclusive"]; + return a === b ? 0 : a > b ? 1 : -1; } - catch (e) { - logger.warning(`Error when creating the TracingClient: ${coreUtil.getErrorMessage(e)}`); - return undefined; + compare(docProd1, docProd2) { + // Need to check for split, since we don't want to dereference "item" of undefined / exception + if (docProd1.gotSplit()) { + return -1; + } + if (docProd2.gotSplit()) { + return 1; + } + const orderByItemsRes1 = this.getOrderByItems(docProd1.peekBufferedItems()[0]); + const orderByItemsRes2 = this.getOrderByItems(docProd2.peekBufferedItems()[0]); + // validate order by items and types + // TODO: once V1 order by on different types is fixed this need to change + this.validateOrderByItems(orderByItemsRes1, orderByItemsRes2); + // no async call in the for loop + for (let i = 0; i < orderByItemsRes1.length; i++) { + // compares the orderby items one by one + const compRes = this.compareOrderByItem(orderByItemsRes1[i], orderByItemsRes2[i]); + if (compRes !== 0) { + if (this.sortOrder[i] === "Ascending") { + return compRes; + } + else if (this.sortOrder[i] === "Descending") { + return -compRes; + } + } + } + return this.targetPartitionKeyRangeDocProdComparator(docProd1, docProd2); } -} -function tryCreateSpan(tracingClient, request, userAgent) { - try { - // As per spec, we do not need to differentiate between HTTP and HTTPS in span name. - const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { - spanKind: "client", - spanAttributes: { - "http.method": request.method, - "http.url": request.url, - requestId: request.requestId, - }, - }); - // If the span is not recording, don't do any more work. 
- if (!span.isRecording()) { - span.end(); - return undefined; + // TODO: This smells funny + compareValue(item1, type1, item2, type2) { + if (type1 === "object" || type2 === "object") { + throw new Error("Tried to compare an object type"); } - if (userAgent) { - span.setAttribute("http.user_agent", userAgent); + const type1Ord = TYPEORDCOMPARATOR[type1].ord; + const type2Ord = TYPEORDCOMPARATOR[type2].ord; + const typeCmp = type1Ord - type2Ord; + if (typeCmp !== 0) { + // if the types are different, use type ordinal + return typeCmp; } - // set headers - const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); - for (const [key, value] of Object.entries(headers)) { - request.headers.set(key, value); + // both are of the same type + if (type1Ord === TYPEORDCOMPARATOR["undefined"].ord || + type1Ord === TYPEORDCOMPARATOR["NoValue"].ord) { + // if both types are undefined or Null they are equal + return 0; } - return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; + const compFunc = TYPEORDCOMPARATOR[type1].compFunc; + if (typeof compFunc === "undefined") { + throw new Error("Cannot find the comparison function"); + } + // same type and type is defined compare the items + return compFunc(item1, item2); } - catch (e) { - logger.warning(`Skipping creating a tracing span due to an error: ${coreUtil.getErrorMessage(e)}`); - return undefined; + compareOrderByItem(orderByItem1, orderByItem2) { + const type1 = this.getType(orderByItem1); + const type2 = this.getType(orderByItem2); + return this.compareValue(orderByItem1["item"], type1, orderByItem2["item"], type2); } -} -function tryProcessError(span, error) { - try { - span.setStatus({ - status: "error", - error: coreUtil.isError(error) ? error : undefined, - }); - if (isRestError(error) && error.statusCode) { - span.setAttribute("http.status_code", error.statusCode); + validateOrderByItems(res1, res2) { + if (res1.length !== res2.length) { + throw new Error(`Expected ${res1.length}, but got ${res2.length}.`); + } + if (res1.length !== this.sortOrder.length) { + throw new Error("orderByItems cannot have a different size than sort orders."); + } + for (let i = 0; i < this.sortOrder.length; i++) { + const type1 = this.getType(res1[i]); + const type2 = this.getType(res2[i]); + if (type1 !== type2) { + throw new Error(`Expected ${type1}, but got ${type2}. Cannot execute cross partition order-by queries on mixed types. Consider filtering your query using IS_STRING or IS_NUMBER to get around this exception.`); + } } - span.end(); - } - catch (e) { - logger.warning(`Skipping tracing span processing due to an error: ${coreUtil.getErrorMessage(e)}`); } -} -function tryProcessResponse(span, response) { - try { - span.setAttribute("http.status_code", response.status); - const serviceRequestId = response.headers.get("x-ms-request-id"); - if (serviceRequestId) { - span.setAttribute("serviceRequestId", serviceRequestId); + getType(orderByItem) { + // TODO: any item? + if (orderByItem === undefined || orderByItem.item === undefined) { + return "NoValue"; } - span.setStatus({ - status: "success", - }); - span.end(); + const type = typeof orderByItem.item; + if (TYPEORDCOMPARATOR[type] === undefined) { + throw new Error(`unrecognizable type ${type}`); + } + return type; } - catch (e) { - logger.warning(`Skipping tracing span processing due to an error: ${coreUtil.getErrorMessage(e)}`); + getOrderByItems(res) { + // TODO: any res? 
+ return res["orderByItems"]; } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function isNodeReadableStream(x) { - return Boolean(x && typeof x["pipe"] === "function"); -} -function isWebReadableStream(x) { - return Boolean(x && - typeof x.getReader === "function" && - typeof x.tee === "function"); -} -function isBlob(x) { - return typeof x.stream === "function"; +/** @hidden */ +class MaxAggregator { + /** + * Represents an aggregator for MAX operator. + * @hidden + */ + constructor() { + this.value = undefined; + this.comparer = new OrderByDocumentProducerComparator(["Ascending"]); + } + /** + * Add the provided item to aggregation result. + */ + aggregate(other) { + if (this.value === undefined) { + this.value = other.max; + } + else if (this.comparer.compareValue(other.max, typeof other.max, this.value, typeof this.value) > 0) { + this.value = other.max; + } + } + /** + * Get the aggregation result. + */ + getResult() { + return this.value; + } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function streamAsyncIterator() { - return tslib.__asyncGenerator(this, arguments, function* streamAsyncIterator_1() { - const reader = this.getReader(); - try { - while (true) { - const { done, value } = yield tslib.__await(reader.read()); - if (done) { - return yield tslib.__await(void 0); - } - yield yield tslib.__await(value); - } +/** @hidden */ +class MinAggregator { + /** + * Represents an aggregator for MIN operator. + * @hidden + */ + constructor() { + this.value = undefined; + this.comparer = new OrderByDocumentProducerComparator(["Ascending"]); + } + /** + * Add the provided item to aggregation result. + */ + aggregate(other) { + if (this.value === undefined) { + // || typeof this.value === "object" + this.value = other.min; } - finally { - reader.releaseLock(); + else { + const otherType = other.min === null ? "NoValue" : typeof other.min; // || typeof other === "object" + const thisType = this.value === null ? "NoValue" : typeof this.value; + if (this.comparer.compareValue(other.min, otherType, this.value, thisType) < 0) { + this.value = other.min; + } } - }); -} -function makeAsyncIterable(webStream) { - if (!webStream[Symbol.asyncIterator]) { - webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); } - if (!webStream.values) { - webStream.values = streamAsyncIterator.bind(webStream); + /** + * Get the aggregation result. + */ + getResult() { + return this.value; } } -function nodeStreamFromWebStream(webStream) { - makeAsyncIterable(webStream); - return stream.Readable.fromWeb(webStream); -} -function toWebStream(stream$1) { - return isWebReadableStream(stream$1) - ? stream$1 - : stream.Readable.toWeb(stream.Readable.from(stream$1)); -} -function toStream(source) { - if (source instanceof Uint8Array) { - return stream.Readable.from(Buffer.from(source)); - } - else if (isBlob(source)) { - return nodeStreamFromWebStream(source.stream()); - } - else if (isNodeReadableStream(source)) { - return source; + +/** @hidden */ +class SumAggregator { + /** + * Add the provided item to aggregation result. + */ + aggregate(other) { + if (other === undefined) { + return; + } + if (this.sum === undefined) { + this.sum = other; + } + else { + this.sum += other; + } } - else { - return nodeStreamFromWebStream(source); + /** + * Get the aggregation result. 
+ */ + getResult() { + return this.sum; } } -function concatenateStreams(sources) { - if (sources.some(isWebReadableStream)) { - throw new Error("Was not expecting a Web stream here"); - } - return stream.Readable.from((function () { - return tslib.__asyncGenerator(this, arguments, function* () { - var _a, e_1, _b, _c; - for (const stream of sources) { - try { - for (var _d = true, stream_1 = (e_1 = void 0, tslib.__asyncValues(stream)), stream_1_1; stream_1_1 = yield tslib.__await(stream_1.next()), _a = stream_1_1.done, !_a; _d = true) { - _c = stream_1_1.value; - _d = false; - const chunk = _c; - yield yield tslib.__await(chunk); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (!_d && !_a && (_b = stream_1.return)) yield tslib.__await(_b.call(stream_1)); - } - finally { if (e_1) throw e_1.error; } - } - } - }); - })()); -} -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function generateBoundary() { - return `----AzSDKFormBoundary${coreUtil.randomUUID()}`; -} -function encodeHeaders(headers) { - let result = ""; - for (const [key, value] of headers) { - result += `${key}: ${value}\r\n`; - } - return result; -} -function getLength(source) { - if (source instanceof Uint8Array) { - return source.byteLength; +/** @hidden */ +class StaticValueAggregator { + aggregate(other) { + if (this.value === undefined) { + this.value = other; + } } - else if (isBlob(source)) { - // if was created using createFile then -1 means we have an unknown size - return source.size === -1 ? undefined : source.size; + getResult() { + return this.value; } - else { - return undefined; +} + +// Copyright (c) Microsoft Corporation. +function createAggregator(aggregateType) { + switch (aggregateType) { + case "Average": + return new AverageAggregator(); + case "Count": + return new CountAggregator(); + case "Max": + return new MaxAggregator(); + case "Min": + return new MinAggregator(); + case "Sum": + return new SumAggregator(); + default: + return new StaticValueAggregator(); } } -function getTotalLength(sources) { - let total = 0; - for (const source of sources) { - const partLength = getLength(source); - if (partLength === undefined) { - return undefined; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** @hidden */ +var FetchResultType; +(function (FetchResultType) { + FetchResultType[FetchResultType["Done"] = 0] = "Done"; + FetchResultType[FetchResultType["Exception"] = 1] = "Exception"; + FetchResultType[FetchResultType["Result"] = 2] = "Result"; +})(FetchResultType || (FetchResultType = {})); +/** @hidden */ +class FetchResult { + /** + * Wraps fetch results for the document producer. + * This allows the document producer to buffer exceptions so that actual results don't get flushed during splits. 
+ * + * @param feedReponse - The response the document producer got back on a successful fetch + * @param error - The exception meant to be buffered on an unsuccessful fetch + * @hidden + */ + constructor(feedResponse, error) { + // TODO: feedResponse/error + if (feedResponse !== undefined) { + this.feedResponse = feedResponse; + this.fetchResultType = FetchResultType.Result; } else { - total += partLength; + this.error = error; + this.fetchResultType = FetchResultType.Exception; } } - return total; } -function buildRequestBody(request, parts, boundary) { - const sources = [ - coreUtil.stringToUint8Array(`--${boundary}`, "utf-8"), - ...parts.flatMap((part) => [ - coreUtil.stringToUint8Array("\r\n", "utf-8"), - coreUtil.stringToUint8Array(encodeHeaders(part.headers), "utf-8"), - coreUtil.stringToUint8Array("\r\n", "utf-8"), - part.body, - coreUtil.stringToUint8Array(`\r\n--${boundary}`, "utf-8"), - ]), - coreUtil.stringToUint8Array("--\r\n\r\n", "utf-8"), - ]; - const contentLength = getTotalLength(sources); - if (contentLength) { - request.headers.set("Content-Length", contentLength); + +/** @hidden */ +class DocumentProducer { + /** + * Provides the Target Partition Range Query Execution Context. + * @param clientContext - The service endpoint to use to create the client. + * @param collectionLink - Represents collection link + * @param query - A SQL query. + * @param targetPartitionKeyRange - Query Target Partition key Range + * @hidden + */ + constructor(clientContext, collectionLink, query, targetPartitionKeyRange, options) { + this.clientContext = clientContext; + this.generation = 0; + this.fetchFunction = async (diagnosticNode, options) => { + const path = getPathFromLink(this.collectionLink, exports.ResourceType.item); + diagnosticNode.addData({ partitionKeyRangeId: this.targetPartitionKeyRange.id }); + const id = getIdFromLink(this.collectionLink); + return this.clientContext.queryFeed({ + path, + resourceType: exports.ResourceType.item, + resourceId: id, + resultFn: (result) => result.Documents, + query: this.query, + options, + diagnosticNode, + partitionKeyRangeId: this.targetPartitionKeyRange["id"], + }); + }; + // TODO: any options + this.collectionLink = collectionLink; + this.query = query; + this.targetPartitionKeyRange = targetPartitionKeyRange; + this.fetchResults = []; + this.allFetched = false; + this.err = undefined; + this.previousContinuationToken = undefined; + this.continuationToken = undefined; + this.respHeaders = getInitialHeader(); + this.internalExecutionContext = new DefaultQueryExecutionContext(options, this.fetchFunction); } - request.body = (() => concatenateStreams(sources.map((source) => (typeof source === "function" ? 
source() : source)).map(toStream))); -} -/** - * Name of multipart policy - */ -const multipartPolicyName = "multipartPolicy"; -const maxBoundaryLength = 70; -const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); -function assertValidBoundary(boundary) { - if (boundary.length > maxBoundaryLength) { - throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); + /** + * Synchronously gives the contiguous buffered results (stops at the first non result) if any + * @returns buffered current items if any + * @hidden + */ + peekBufferedItems() { + const bufferedResults = []; + for (let i = 0, done = false; i < this.fetchResults.length && !done; i++) { + const fetchResult = this.fetchResults[i]; + switch (fetchResult.fetchResultType) { + case FetchResultType.Done: + done = true; + break; + case FetchResultType.Exception: + done = true; + break; + case FetchResultType.Result: + bufferedResults.push(fetchResult.feedResponse); + break; + } + } + return bufferedResults; } - if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { - throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); + hasMoreResults() { + return this.internalExecutionContext.hasMoreResults() || this.fetchResults.length !== 0; } -} -/** - * Pipeline policy for multipart requests - */ -function multipartPolicy() { - return { - name: multipartPolicyName, - sendRequest(request, next) { - var _a; - if (!request.multipartBody) { - return next(request); - } - if (request.body) { - throw new Error("multipartBody and regular body cannot be set at the same time"); - } - let boundary = request.multipartBody.boundary; - const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? _a : "multipart/mixed"; - const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); - if (!parsedHeader) { - throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); - } - const [, contentType, parsedBoundary] = parsedHeader; - if (parsedBoundary && boundary && parsedBoundary !== boundary) { - throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); - } - boundary !== null && boundary !== void 0 ? boundary : (boundary = parsedBoundary); - if (boundary) { - assertValidBoundary(boundary); - } - else { - boundary = generateBoundary(); + gotSplit() { + const fetchResult = this.fetchResults[0]; + if (fetchResult.fetchResultType === FetchResultType.Exception) { + if (DocumentProducer._needPartitionKeyRangeCacheRefresh(fetchResult.error)) { + return true; } - request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); - buildRequestBody(request, request.multipartBody.parts, boundary); - request.multipartBody = undefined; - return next(request); - }, - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Create a new pipeline with a default set of customizable policies. - * @param options - Options to configure a custom pipeline. 
- */ -function createPipelineFromOptions(options) { - var _a; - const pipeline = createEmptyPipeline(); - if (coreUtil.isNode) { - if (options.tlsOptions) { - pipeline.addPolicy(tlsPolicy(options.tlsOptions)); } - pipeline.addPolicy(proxyPolicy(options.proxyOptions)); - pipeline.addPolicy(decompressResponsePolicy()); + return false; } - pipeline.addPolicy(formDataPolicy()); - pipeline.addPolicy(userAgentPolicy(options.userAgentOptions)); - pipeline.addPolicy(setClientRequestIdPolicy((_a = options.telemetryOptions) === null || _a === void 0 ? void 0 : _a.clientRequestIdHeaderName)); - // The multipart policy is added after policies with no phase, so that - // policies can be added between it and formDataPolicy to modify - // properties (e.g., making the boundary constant in recorded tests). - pipeline.addPolicy(multipartPolicy(), { afterPhase: "Deserialize" }); - pipeline.addPolicy(defaultRetryPolicy(options.retryOptions), { phase: "Retry" }); - pipeline.addPolicy(tracingPolicy(options.userAgentOptions), { afterPhase: "Retry" }); - if (coreUtil.isNode) { - // Both XHR and Fetch expect to handle redirects automatically, - // so only include this policy when we're in Node. - pipeline.addPolicy(redirectPolicy(options.redirectOptions), { afterPhase: "Retry" }); + _getAndResetActiveResponseHeaders() { + const ret = this.respHeaders; + this.respHeaders = getInitialHeader(); + return ret; } - pipeline.addPolicy(logPolicy(options.loggingOptions), { afterPhase: "Sign" }); - return pipeline; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const DEFAULT_TLS_SETTINGS = {}; -function isReadableStream(body) { - return body && typeof body.pipe === "function"; -} -function isStreamComplete(stream) { - return new Promise((resolve) => { - stream.on("close", resolve); - stream.on("end", resolve); - stream.on("error", resolve); - }); -} -function isArrayBuffer(body) { - return body && typeof body.byteLength === "number"; -} -class ReportTransform extends stream.Transform { - // eslint-disable-next-line @typescript-eslint/ban-types - _transform(chunk, _encoding, callback) { - this.push(chunk); - this.loadedBytes += chunk.length; - try { - this.progressCallback({ loadedBytes: this.loadedBytes }); - callback(); + _updateStates(err, allFetched) { + // TODO: any Error + if (err) { + this.err = err; + return; } - catch (e) { - callback(e); + if (allFetched) { + this.allFetched = true; } + if (this.internalExecutionContext.continuationToken === this.continuationToken) { + // nothing changed + return; + } + this.previousContinuationToken = this.continuationToken; + this.continuationToken = this.internalExecutionContext.continuationToken; } - constructor(progressCallback) { - super(); - this.loadedBytes = 0; - this.progressCallback = progressCallback; - } -} -/** - * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. - * @internal - */ -class NodeHttpClient { - constructor() { - this.cachedHttpsAgents = new WeakMap(); + static _needPartitionKeyRangeCacheRefresh(error) { + // TODO: error + return (error.code === StatusCodes.Gone && + "substatus" in error && + error["substatus"] === SubStatusCodes.PartitionKeyRangeGone); } /** - * Makes a request over an underlying transport layer and returns the response. - * @param request - The request to be made. 
+ * Fetches and bufferes the next page of results and executes the given callback */ - async sendRequest(request) { - var _a, _b, _c; - const abortController$1 = new AbortController(); - let abortListener; - if (request.abortSignal) { - if (request.abortSignal.aborted) { - throw new abortController.AbortError("The operation was aborted."); - } - abortListener = (event) => { - if (event.type === "abort") { - abortController$1.abort(); - } - }; - request.abortSignal.addEventListener("abort", abortListener); - } - if (request.timeout > 0) { - setTimeout(() => { - abortController$1.abort(); - }, request.timeout); - } - const acceptEncoding = request.headers.get("Accept-Encoding"); - const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); - let body = typeof request.body === "function" ? request.body() : request.body; - if (body && !request.headers.has("Content-Length")) { - const bodyLength = getBodyLength(body); - if (bodyLength !== null) { - request.headers.set("Content-Length", bodyLength); - } + async bufferMore(diagnosticNode) { + if (this.err) { + throw this.err; } - let responseStream; try { - if (body && request.onUploadProgress) { - const onUploadProgress = request.onUploadProgress; - const uploadReportStream = new ReportTransform(onUploadProgress); - uploadReportStream.on("error", (e) => { - logger.error("Error in upload progress", e); + const { result: resources, headers: headerResponse } = await this.internalExecutionContext.fetchMore(diagnosticNode); + ++this.generation; + this._updateStates(undefined, resources === undefined); + if (resources !== undefined) { + // some more results + resources.forEach((element) => { + // TODO: resources any + this.fetchResults.push(new FetchResult(element, undefined)); }); - if (isReadableStream(body)) { - body.pipe(uploadReportStream); - } - else { - uploadReportStream.end(body); - } - body = uploadReportStream; - } - const res = await this.makeRequest(request, abortController$1, body); - const headers = getResponseHeaders(res); - const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0; - const response = { - status, - headers, - request, - }; - // Responses to HEAD must not have a body. - // If they do return a body, that body must be ignored. - if (request.method === "HEAD") { - // call resume() and not destroy() to avoid closing the socket - // and losing keep alive - res.resume(); - return response; } - responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; - const onDownloadProgress = request.onDownloadProgress; - if (onDownloadProgress) { - const downloadReportStream = new ReportTransform(onDownloadProgress); - downloadReportStream.on("error", (e) => { - logger.error("Error in download progress", e); - }); - responseStream.pipe(downloadReportStream); - responseStream = downloadReportStream; + // need to modify the header response so that the query metrics are per partition + if (headerResponse != null && Constants$1.HttpHeaders.QueryMetrics in headerResponse) { + // "0" is the default partition before one is actually assigned. + const queryMetrics = headerResponse[Constants$1.HttpHeaders.QueryMetrics]["0"]; + // Wraping query metrics in a object where the keys are the partition key range. 
+ headerResponse[Constants$1.HttpHeaders.QueryMetrics] = {}; + headerResponse[Constants$1.HttpHeaders.QueryMetrics][this.targetPartitionKeyRange.id] = + queryMetrics; } - if ( - // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code - ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) || - ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) { - response.readableStreamBody = responseStream; + return { result: resources, headers: headerResponse }; + } + catch (err) { + // TODO: any error + if (DocumentProducer._needPartitionKeyRangeCacheRefresh(err)) { + // Split just happend + // Buffer the error so the execution context can still get the feedResponses in the itemBuffer + const bufferedError = new FetchResult(undefined, err); + this.fetchResults.push(bufferedError); + // Putting a dummy result so that the rest of code flows + return { + result: [bufferedError], + headers: err.headers, + }; } else { - response.bodyAsText = await streamToText(responseStream); - } - return response; - } - finally { - // clean up event listener - if (request.abortSignal && abortListener) { - let uploadStreamDone = Promise.resolve(); - if (isReadableStream(body)) { - uploadStreamDone = isStreamComplete(body); - } - let downloadStreamDone = Promise.resolve(); - if (isReadableStream(responseStream)) { - downloadStreamDone = isStreamComplete(responseStream); - } - Promise.all([uploadStreamDone, downloadStreamDone]) - .then(() => { - var _a; - // eslint-disable-next-line promise/always-return - if (abortListener) { - (_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); - } - }) - .catch((e) => { - logger.warning("Error when cleaning up abortListener on httpRequest", e); - }); + this._updateStates(err, err.resources === undefined); + throw err; } } } - makeRequest(request, abortController$1, body) { - var _a; - const url = new URL(request.url); - const isInsecure = url.protocol !== "https:"; - if (isInsecure && !request.allowInsecureConnection) { - throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + /** + * Synchronously gives the bufferend current item if any + * @returns buffered current item if any + * @hidden + */ + getTargetParitionKeyRange() { + return this.targetPartitionKeyRange; + } + /** + * Fetches the next element in the DocumentProducer. + */ + async nextItem(diagnosticNode) { + if (this.err) { + this._updateStates(this.err, undefined); + throw this.err; } - const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure); - const options = { - agent, - hostname: url.hostname, - path: `${url.pathname}${url.search}`, - port: url.port, - method: request.method, - headers: request.headers.toJSON({ preserveCase: true }), - }; - return new Promise((resolve, reject) => { - const req = isInsecure ? http__namespace.request(options, resolve) : https__namespace.request(options, resolve); - req.once("error", (err) => { - var _a; - reject(new RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? 
_a : RestError.REQUEST_SEND_ERROR, request })); - }); - abortController$1.signal.addEventListener("abort", () => { - const abortError = new abortController.AbortError("The operation was aborted."); - req.destroy(abortError); - reject(abortError); - }); - if (body && isReadableStream(body)) { - body.pipe(req); - } - else if (body) { - if (typeof body === "string" || Buffer.isBuffer(body)) { - req.end(body); - } - else if (isArrayBuffer(body)) { - req.end(ArrayBuffer.isView(body) ? Buffer.from(body.buffer) : Buffer.from(body)); - } - else { - logger.error("Unrecognized body type", body); - reject(new RestError("Unrecognized body type")); - } + try { + const { result, headers } = await this.current(diagnosticNode); + const fetchResult = this.fetchResults.shift(); + this._updateStates(undefined, result === undefined); + if (fetchResult.feedResponse !== result) { + throw new Error(`Expected ${fetchResult.feedResponse} to equal ${result}`); } - else { - // streams don't like "undefined" being passed as data - req.end(); + switch (fetchResult.fetchResultType) { + case FetchResultType.Done: + return { result: undefined, headers }; + case FetchResultType.Exception: + fetchResult.error.headers = headers; + throw fetchResult.error; + case FetchResultType.Result: + return { result: fetchResult.feedResponse, headers }; } - }); + } + catch (err) { + this._updateStates(err, err.item === undefined); + throw err; + } } - getOrCreateAgent(request, isInsecure) { - var _a; - const disableKeepAlive = request.disableKeepAlive; - // Handle Insecure requests first - if (isInsecure) { - if (disableKeepAlive) { - // keepAlive:false is the default so we don't need a custom Agent - return http__namespace.globalAgent; - } - if (!this.cachedHttpAgent) { - // If there is no cached agent create a new one and cache it. - this.cachedHttpAgent = new http__namespace.Agent({ keepAlive: true }); + /** + * Retrieve the current element on the DocumentProducer. + */ + async current(diagnosticNode) { + // If something is buffered just give that + if (this.fetchResults.length > 0) { + const fetchResult = this.fetchResults[0]; + // Need to unwrap fetch results + switch (fetchResult.fetchResultType) { + case FetchResultType.Done: + return { + result: undefined, + headers: this._getAndResetActiveResponseHeaders(), + }; + case FetchResultType.Exception: + fetchResult.error.headers = this._getAndResetActiveResponseHeaders(); + throw fetchResult.error; + case FetchResultType.Result: + return { + result: fetchResult.feedResponse, + headers: this._getAndResetActiveResponseHeaders(), + }; } - return this.cachedHttpAgent; } - else { - if (disableKeepAlive && !request.tlsSettings) { - // When there are no tlsSettings and keepAlive is false - // we don't need a custom agent - return https__namespace.globalAgent; - } - // We use the tlsSettings to index cached clients - const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS; - // Get the cached agent or create a new one with the - // provided values for keepAlive and tlsSettings - let agent = this.cachedHttpsAgents.get(tlsSettings); - if (agent && agent.options.keepAlive === !disableKeepAlive) { - return agent; - } - logger.info("No cached TLS Agent exist, creating a new Agent"); - agent = new https__namespace.Agent(Object.assign({ - // keepAlive is true if disableKeepAlive is false. 
- keepAlive: !disableKeepAlive }, tlsSettings)); - this.cachedHttpsAgents.set(tlsSettings, agent); - return agent; + // If there isn't anymore items left to fetch then let the user know. + if (this.allFetched) { + return { + result: undefined, + headers: this._getAndResetActiveResponseHeaders(), + }; + } + // If there are no more bufferd items and there are still items to be fetched then buffer more + const { result, headers } = await this.bufferMore(diagnosticNode); + mergeHeaders(this.respHeaders, headers); + if (result === undefined) { + return { result: undefined, headers: this.respHeaders }; } + return this.current(diagnosticNode); } } -function getResponseHeaders(res) { - const headers = createHttpHeaders(); - for (const header of Object.keys(res.headers)) { - const value = res.headers[header]; - if (Array.isArray(value)) { - if (value.length > 0) { - headers.set(header, value[0]); - } + +/** @hidden */ +class QueryRange { + /** + * Represents a QueryRange. + * + * @param rangeMin - min + * @param rangeMin - max + * @param isMinInclusive - isMinInclusive + * @param isMaxInclusive - isMaxInclusive + * @hidden + */ + constructor(rangeMin, rangeMax, isMinInclusive, isMaxInclusive) { + this.min = rangeMin; + this.max = rangeMax; + this.isMinInclusive = isMinInclusive; + this.isMaxInclusive = isMaxInclusive; + } + overlaps(other) { + const range1 = this; // eslint-disable-line @typescript-eslint/no-this-alias + const range2 = other; + if (range1 === undefined || range2 === undefined) { + return false; } - else if (value) { - headers.set(header, value); + if (range1.isEmpty() || range2.isEmpty()) { + return false; + } + if (range1.min <= range2.max || range2.min <= range1.max) { + if ((range1.min === range2.max && !(range1.isMinInclusive && range2.isMaxInclusive)) || + (range2.min === range1.max && !(range2.isMinInclusive && range1.isMaxInclusive))) { + return false; + } + return true; } + return false; } - return headers; -} -function getDecodedResponseStream(stream, headers) { - const contentEncoding = headers.get("Content-Encoding"); - if (contentEncoding === "gzip") { - const unzip = zlib__namespace.createGunzip(); - stream.pipe(unzip); - return unzip; + isFullRange() { + return (this.min === Constants$1.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey && + this.max === Constants$1.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey && + this.isMinInclusive === true && + this.isMaxInclusive === false); } - else if (contentEncoding === "deflate") { - const inflate = zlib__namespace.createInflate(); - stream.pipe(inflate); - return inflate; + isEmpty() { + return !(this.isMinInclusive && this.isMaxInclusive) && this.min === this.max; + } + /** + * Parse a QueryRange from a partitionKeyRange + * @returns QueryRange + * @hidden + */ + static parsePartitionKeyRange(partitionKeyRange) { + return new QueryRange(partitionKeyRange[Constants$1.PartitionKeyRange.MinInclusive], partitionKeyRange[Constants$1.PartitionKeyRange.MaxExclusive], true, false); + } + /** + * Parse a QueryRange from a dictionary + * @returns QueryRange + * @hidden + */ + static parseFromDict(queryRangeDict) { + return new QueryRange(queryRangeDict.min, queryRangeDict.max, queryRangeDict.isMinInclusive, queryRangeDict.isMaxInclusive); } - return stream; } -function streamToText(stream) { - return new Promise((resolve, reject) => { - const buffer = []; - stream.on("data", (chunk) => { - if (Buffer.isBuffer(chunk)) { - buffer.push(chunk); + +/** @hidden */ +class 
InMemoryCollectionRoutingMap { + /** + * Represents a InMemoryCollectionRoutingMap Object, + * Stores partition key ranges in an efficient way with some additional information and provides + * convenience methods for working with set of ranges. + */ + constructor(orderedPartitionKeyRanges, orderedPartitionInfo) { + this.orderedPartitionKeyRanges = orderedPartitionKeyRanges; + this.orderedRanges = orderedPartitionKeyRanges.map((pkr) => { + return new QueryRange(pkr[Constants$1.PartitionKeyRange.MinInclusive], pkr[Constants$1.PartitionKeyRange.MaxExclusive], true, false); + }); + this.orderedPartitionInfo = orderedPartitionInfo; + } + getOrderedParitionKeyRanges() { + return this.orderedPartitionKeyRanges; + } + getOverlappingRanges(providedQueryRanges) { + // TODO This code has all kinds of smells. Multiple iterations and sorts just to grab overlapping ranges + // stfaul attempted to bring it down to one for-loop and failed + const pqr = Array.isArray(providedQueryRanges) + ? providedQueryRanges + : [providedQueryRanges]; + const minToPartitionRange = {}; // TODO: any + // this for loop doesn't invoke any async callback + for (const queryRange of pqr) { + if (queryRange.isEmpty()) { + continue; } - else { - buffer.push(Buffer.from(chunk)); + if (queryRange.isFullRange()) { + return this.orderedPartitionKeyRanges; } - }); - stream.on("end", () => { - resolve(Buffer.concat(buffer).toString("utf8")); - }); - stream.on("error", (e) => { - if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { - reject(e); + const minIndex = this.orderedRanges.findIndex((range) => { + if (queryRange.min > range.min && queryRange.min < range.max) { + return true; + } + if (queryRange.min === range.min) { + return true; + } + if (queryRange.min === range.max) { + return true; + } + }); + if (minIndex < 0) { + throw new Error("error in collection routing map, queried value is less than the start range."); } - else { - reject(new RestError(`Error reading response as text: ${e.message}`, { - code: RestError.PARSE_ERROR, - })); + // Start at the end and work backwards + let maxIndex; + for (let i = this.orderedRanges.length - 1; i >= 0; i--) { + const range = this.orderedRanges[i]; + if (queryRange.max > range.min && queryRange.max < range.max) { + maxIndex = i; + break; + } + if (queryRange.max === range.min) { + maxIndex = i; + break; + } + if (queryRange.max === range.max) { + maxIndex = i; + break; + } + } + if (maxIndex > this.orderedRanges.length) { + throw new Error("error in collection routing map, queried value is greater than the end range."); + } + for (let j = minIndex; j < maxIndex + 1; j++) { + if (queryRange.overlaps(this.orderedRanges[j])) { + minToPartitionRange[this.orderedPartitionKeyRanges[j][Constants$1.PartitionKeyRange.MinInclusive]] = this.orderedPartitionKeyRanges[j]; + } } + } + const overlappingPartitionKeyRanges = Object.keys(minToPartitionRange).map((k) => minToPartitionRange[k]); + return overlappingPartitionKeyRanges.sort((a, b) => { + return a[Constants$1.PartitionKeyRange.MinInclusive].localeCompare(b[Constants$1.PartitionKeyRange.MinInclusive]); }); - }); -} -/** @internal */ -function getBodyLength(body) { - if (!body) { - return 0; - } - else if (Buffer.isBuffer(body)) { - return body.length; - } - else if (isReadableStream(body)) { - return null; - } - else if (isArrayBuffer(body)) { - return body.byteLength; - } - else if (typeof body === "string") { - return Buffer.from(body).length; - } - else { - return null; } } -/** - * Create a new HttpClient instance 
for the NodeJS environment. - * @internal - */ -function createNodeHttpClient() { - return new NodeHttpClient(); -} // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * Create the correct HttpClient for the current environment. + * @hidden */ -function createDefaultHttpClient() { - return createNodeHttpClient(); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -class PipelineRequestImpl { - constructor(options) { - var _a, _b, _c, _d, _e, _f, _g; - this.url = options.url; - this.body = options.body; - this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : createHttpHeaders(); - this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET"; - this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : 0; - this.multipartBody = options.multipartBody; - this.formData = options.formData; - this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false; - this.proxySettings = options.proxySettings; - this.streamResponseStatusCodes = options.streamResponseStatusCodes; - this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false; - this.abortSignal = options.abortSignal; - this.tracingOptions = options.tracingOptions; - this.onUploadProgress = options.onUploadProgress; - this.onDownloadProgress = options.onDownloadProgress; - this.requestId = options.requestId || coreUtil.randomUUID(); - this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? _f : false; - this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; +function compareRanges(a, b) { + const aVal = a[0][Constants$1.PartitionKeyRange.MinInclusive]; + const bVal = b[0][Constants$1.PartitionKeyRange.MinInclusive]; + if (aVal > bVal) { + return 1; } + if (aVal < bVal) { + return -1; + } + return 0; } -/** - * Creates a new pipeline request with the given options. - * This method is to allow for the easy setting of default values and not required. - * @param options - The options to create the request with. - */ -function createPipelineRequest(options) { - return new PipelineRequestImpl(options); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The programmatic identifier of the exponentialRetryPolicy. - */ -const exponentialRetryPolicyName = "exponentialRetryPolicy"; -/** - * A policy that attempts to retry requests while introducing an exponentially increasing delay. - * @param options - Options that configure retry logic. - */ -function exponentialRetryPolicy(options = {}) { - var _a; - return retryPolicy([ - exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })), - ], { - maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, - }); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Name of the {@link systemErrorRetryPolicy} - */ -const systemErrorRetryPolicyName = "systemErrorRetryPolicy"; -/** - * A retry policy that specifically seeks to handle errors in the - * underlying transport layer (e.g. DNS lookup failures) rather than - * retryable error codes from the server itself. - * @param options - Options that customize the policy. 
- */ -function systemErrorRetryPolicy(options = {}) { - var _a; - return { - name: systemErrorRetryPolicyName, - sendRequest: retryPolicy([ - exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })), - ], { - maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, - }).sendRequest, - }; +/** @hidden */ +function createCompleteRoutingMap(partitionKeyRangeInfoTuppleList) { + const rangeById = {}; // TODO: any + const rangeByInfo = {}; // TODO: any + let sortedRanges = []; + // the for loop doesn't invoke any async callback + for (const r of partitionKeyRangeInfoTuppleList) { + rangeById[r[0][Constants$1.PartitionKeyRange.Id]] = r; + rangeByInfo[r[1]] = r[0]; + sortedRanges.push(r); + } + sortedRanges = sortedRanges.sort(compareRanges); + const partitionKeyOrderedRange = sortedRanges.map((r) => r[0]); + const orderedPartitionInfo = sortedRanges.map((r) => r[1]); + if (!isCompleteSetOfRange(partitionKeyOrderedRange)) { + return undefined; + } + return new InMemoryCollectionRoutingMap(partitionKeyOrderedRange, orderedPartitionInfo); } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Name of the {@link throttlingRetryPolicy} - */ -const throttlingRetryPolicyName = "throttlingRetryPolicy"; /** - * A policy that retries when the server sends a 429 response with a Retry-After header. - * - * To learn more, please refer to - * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, - * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and - * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors - * - * @param options - Options that configure retry logic. + * @hidden */ -function throttlingRetryPolicy(options = {}) { - var _a; - return { - name: throttlingRetryPolicyName, - sendRequest: retryPolicy([throttlingRetryStrategy()], { - maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, - }).sendRequest, - }; +function isCompleteSetOfRange(partitionKeyOrderedRange) { + // TODO: any + let isComplete = false; + if (partitionKeyOrderedRange.length > 0) { + const firstRange = partitionKeyOrderedRange[0]; + const lastRange = partitionKeyOrderedRange[partitionKeyOrderedRange.length - 1]; + isComplete = + firstRange[Constants$1.PartitionKeyRange.MinInclusive] === + Constants$1.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey; + isComplete = + isComplete && + lastRange[Constants$1.PartitionKeyRange.MaxExclusive] === + Constants$1.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey; + for (let i = 1; i < partitionKeyOrderedRange.length; i++) { + const previousRange = partitionKeyOrderedRange[i - 1]; + const currentRange = partitionKeyOrderedRange[i]; + isComplete = + isComplete && + previousRange[Constants$1.PartitionKeyRange.MaxExclusive] === + currentRange[Constants$1.PartitionKeyRange.MinInclusive]; + if (!isComplete) { + if (previousRange[Constants$1.PartitionKeyRange.MaxExclusive] > + currentRange[Constants$1.PartitionKeyRange.MinInclusive]) { + throw Error("Ranges overlap"); + } + break; + } + } + } + return isComplete; } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry -}; -/** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. - * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. - * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. - * @returns - A promise that, if it resolves, will resolve with an access token. - */ -async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < refreshTimeout) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; +/** @hidden */ +class PartitionKeyRangeCache { + constructor(clientContext) { + this.clientContext = clientContext; + this.collectionRoutingMapByCollectionId = {}; + } + /** + * Finds or Instantiates the requested Collection Routing Map + * @param collectionLink - Requested collectionLink + * @hidden + */ + async onCollectionRoutingMap(collectionLink, diagnosticNode, forceRefresh = false) { + const collectionId = getIdFromLink(collectionLink); + if (this.collectionRoutingMapByCollectionId[collectionId] === undefined || forceRefresh) { + this.collectionRoutingMapByCollectionId[collectionId] = this.requestCollectionRoutingMap(collectionLink, diagnosticNode); } + return this.collectionRoutingMapByCollectionId[collectionId]; } - let token = await tryGetAccessToken(); - while (token === null) { - await delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - let tenantId; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. 
- */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). - */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - }, - }; /** - * Starts a refresh job or returns the existing job if one is already - * running. + * Given the query ranges and a collection, invokes the callback on the list of overlapping partition key ranges + * @hidden */ - function refresh(scopes, getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - tenantId = getTokenOptions.tenantId; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - tenantId = undefined; - throw reason; - }); - } - return refreshWorker; + async getOverlappingRanges(collectionLink, queryRange, diagnosticNode, forceRefresh = false) { + const crm = await this.onCollectionRoutingMap(collectionLink, diagnosticNode, forceRefresh); + return crm.getOverlappingRanges(queryRange); } - return async (scopes, tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - // If the tenantId passed in token options is different to the one we have - // Or if we are in claim challenge and the token was rejected and a new access token need to be issued, we need to - // refresh the token with the new tenantId or token. - const mustRefresh = tenantId !== tokenOptions.tenantId || Boolean(tokenOptions.claims) || cycler.mustRefresh; - if (mustRefresh) - return refresh(scopes, tokenOptions); - if (cycler.shouldRefresh) { - refresh(scopes, tokenOptions); - } - return token; - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The programmatic identifier of the bearerTokenAuthenticationPolicy. 
- */ -const bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; -/** - * Default authorize request handler - */ -async function defaultAuthorizeRequest(options) { - const { scopes, getAccessToken, request } = options; - const getTokenOptions = { - abortSignal: request.abortSignal, - tracingOptions: request.tracingOptions, - }; - const accessToken = await getAccessToken(scopes, getTokenOptions); - if (accessToken) { - options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + async requestCollectionRoutingMap(collectionLink, diagnosticNode) { + const { resources } = await withMetadataDiagnostics(async (metadataDiagnostics) => { + return this.clientContext + .queryPartitionKeyRanges(collectionLink) + .fetchAllInternal(metadataDiagnostics); + }, diagnosticNode, exports.MetadataLookUpType.PartitionKeyRangeLookUp); + return createCompleteRoutingMap(resources.map((r) => [r, true])); } } -/** - * We will retrieve the challenge only if the response status code was 401, - * and if the response contained the header "WWW-Authenticate" with a non-empty value. - */ -function getChallenge(response) { - const challenge = response.headers.get("WWW-Authenticate"); - if (response.status === 401 && challenge) { - return challenge; + +/** @hidden */ +const PARITIONKEYRANGE = Constants$1.PartitionKeyRange; +/** @hidden */ +class SmartRoutingMapProvider { + constructor(clientContext) { + this.partitionKeyRangeCache = new PartitionKeyRangeCache(clientContext); } - return; -} -/** - * A policy that can request a token from a TokenCredential implementation and - * then apply it to the Authorization header of a request as a Bearer token. - */ -function bearerTokenAuthenticationPolicy(options) { - var _a; - const { credential, scopes, challengeCallbacks } = options; - const logger$1 = options.logger || logger; - const callbacks = Object.assign({ authorizeRequest: (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) !== null && _a !== void 0 ? _a : defaultAuthorizeRequest, authorizeRequestOnChallenge: challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge }, challengeCallbacks); - // This function encapsulates the entire process of reliably retrieving the token - // The options are left out of the public API until there's demand to configure this. - // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions` - // in order to pass through the `options` object. - const getAccessToken = credential - ? createTokenCycler(credential /* , options */) - : () => Promise.resolve(null); - return { - name: bearerTokenAuthenticationPolicyName, - /** - * If there's no challenge parameter: - * - It will try to retrieve the token using the cache, or the credential's getToken. - * - Then it will try the next policy with or without the retrieved token. - * - * It uses the challenge parameters to: - * - Skip a first attempt to get the token from the credential if there's no cached token, - * since it expects the token to be retrievable only after the challenge. - * - Prepare the outgoing request if the `prepareRequest` method has been provided. - * - Send an initial request to receive the challenge if it fails. - * - Process a challenge if the response contains it. - * - Retrieve a token with the challenge information, then re-send the request. 
- */ - async sendRequest(request, next) { - if (!request.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - await callbacks.authorizeRequest({ - scopes: Array.isArray(scopes) ? scopes : [scopes], - request, - getAccessToken, - logger: logger$1, - }); - let response; - let error; - try { - response = await next(request); + static _secondRangeIsAfterFirstRange(range1, range2) { + if (typeof range1.max === "undefined") { + throw new Error("range1 must have max"); + } + if (typeof range2.min === "undefined") { + throw new Error("range2 must have min"); + } + if (range1.max > range2.min) { + // r.min < #previous_r.max + return false; + } + else { + if (range1.max === range2.min && range1.isMaxInclusive && range2.isMinInclusive) { + // the inclusive ending endpoint of previous_r is the same as the inclusive beginning endpoint of r + // they share a point + return false; } - catch (err) { - error = err; - response = err.response; + return true; + } + } + static _isSortedAndNonOverlapping(ranges) { + for (let idx = 1; idx < ranges.length; idx++) { + const previousR = ranges[idx - 1]; + const r = ranges[idx]; + if (!this._secondRangeIsAfterFirstRange(previousR, r)) { + return false; } - if (callbacks.authorizeRequestOnChallenge && - (response === null || response === void 0 ? void 0 : response.status) === 401 && - getChallenge(response)) { - // processes challenge - const shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ - scopes: Array.isArray(scopes) ? scopes : [scopes], - request, - response, - getAccessToken, - logger: logger$1, - }); - if (shouldSendRequest) { - return next(request); + } + return true; + } + static _stringMax(a, b) { + return a >= b ? a : b; + } + static _stringCompare(a, b) { + return a === b ? 0 : a > b ? 1 : -1; + } + static _subtractRange(r, partitionKeyRange) { + const left = this._stringMax(partitionKeyRange[PARITIONKEYRANGE.MaxExclusive], r.min); + const leftInclusive = this._stringCompare(left, r.min) === 0 ? 
r.isMinInclusive : false; + return new QueryRange(left, r.max, leftInclusive, r.isMaxInclusive); + } + /** + * Given the sorted ranges and a collection, invokes the callback on the list of overlapping partition key ranges + * @param callback - Function execute on the overlapping partition key ranges result, + * takes two parameters error, partition key ranges + * @hidden + */ + async getOverlappingRanges(collectionLink, sortedRanges, diagnosticNode) { + // validate if the list is non- overlapping and sorted TODO: any PartitionKeyRanges + if (!SmartRoutingMapProvider._isSortedAndNonOverlapping(sortedRanges)) { + throw new Error("the list of ranges is not a non-overlapping sorted ranges"); + } + let partitionKeyRanges = []; // TODO: any ParitionKeyRanges + if (sortedRanges.length === 0) { + return partitionKeyRanges; + } + const collectionRoutingMap = await this.partitionKeyRangeCache.onCollectionRoutingMap(collectionLink, diagnosticNode); + let index = 0; + let currentProvidedRange = sortedRanges[index]; + for (;;) { + if (currentProvidedRange.isEmpty()) { + // skip and go to the next item + if (++index >= sortedRanges.length) { + return partitionKeyRanges; } + currentProvidedRange = sortedRanges[index]; + continue; } - if (error) { - throw error; + let queryRange; + if (partitionKeyRanges.length > 0) { + queryRange = SmartRoutingMapProvider._subtractRange(currentProvidedRange, partitionKeyRanges[partitionKeyRanges.length - 1]); } else { - return response; + queryRange = currentProvidedRange; } - }, - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The programmatic identifier of the ndJsonPolicy. - */ -const ndJsonPolicyName = "ndJsonPolicy"; -/** - * ndJsonPolicy is a policy used to control keep alive settings for every request. 
- */ -function ndJsonPolicy() { - return { - name: ndJsonPolicyName, - async sendRequest(request, next) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + const overlappingRanges = collectionRoutingMap.getOverlappingRanges(queryRange); + if (overlappingRanges.length <= 0) { + throw new Error(`error: returned overlapping ranges for queryRange ${queryRange} is empty`); + } + partitionKeyRanges = partitionKeyRanges.concat(overlappingRanges); + const lastKnownTargetRange = QueryRange.parsePartitionKeyRange(partitionKeyRanges[partitionKeyRanges.length - 1]); + if (!lastKnownTargetRange) { + throw new Error("expected lastKnowTargetRange to be truthy"); + } + // the overlapping ranges must contain the requested range + if (SmartRoutingMapProvider._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) > + 0) { + throw new Error(`error: returned overlapping ranges ${overlappingRanges} \ + does not contain the requested range ${queryRange}`); + } + // the current range is contained in partitionKeyRanges just move forward + if (++index >= sortedRanges.length) { + return partitionKeyRanges; + } + currentProvidedRange = sortedRanges[index]; + while (SmartRoutingMapProvider._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) <= 0) { + // the current range is covered too.just move forward + if (++index >= sortedRanges.length) { + return partitionKeyRanges; } + currentProvidedRange = sortedRanges[index]; } - return next(request); - }, - }; + } + } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. - */ -const auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; -const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; -async function sendAuthorizeRequest(options) { - var _a, _b; - const { scopes, getAccessToken, request } = options; - const getTokenOptions = { - abortSignal: request.abortSignal, - tracingOptions: request.tracingOptions, - }; - return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? _b : ""; -} -/** - * A policy for external tokens to `x-ms-authorization-auxiliary` header. - * This header will be used when creating a cross-tenant application we may need to handle authentication requests - * for resources that are in different tenants. 
- * You could see [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works - */ -function auxiliaryAuthenticationHeaderPolicy(options) { - const { credentials, scopes } = options; - const logger$1 = options.logger || logger; - const tokenCyclerMap = new WeakMap(); - return { - name: auxiliaryAuthenticationHeaderPolicyName, - async sendRequest(request, next) { - if (!request.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); +/** @hidden */ +const logger$2 = logger$5.createClientLogger("parallelQueryExecutionContextBase"); +/** @hidden */ +var ParallelQueryExecutionContextBaseStates; +(function (ParallelQueryExecutionContextBaseStates) { + ParallelQueryExecutionContextBaseStates["started"] = "started"; + ParallelQueryExecutionContextBaseStates["inProgress"] = "inProgress"; + ParallelQueryExecutionContextBaseStates["ended"] = "ended"; +})(ParallelQueryExecutionContextBaseStates || (ParallelQueryExecutionContextBaseStates = {})); +/** @hidden */ +class ParallelQueryExecutionContextBase { + /** + * Provides the ParallelQueryExecutionContextBase. + * This is the base class that ParallelQueryExecutionContext and OrderByQueryExecutionContext will derive from. + * + * When handling a parallelized query, it instantiates one instance of + * DocumentProcuder per target partition key range and aggregates the result of each. + * + * @param clientContext - The service endpoint to use to create the client. + * @param collectionLink - The Collection Link + * @param options - Represents the feed options. + * @param partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo + * @hidden + */ + constructor(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo) { + this.clientContext = clientContext; + this.collectionLink = collectionLink; + this.query = query; + this.options = options; + this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo; + this.clientContext = clientContext; + this.collectionLink = collectionLink; + this.query = query; + this.options = options; + this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo; + this.diagnosticNodeWrapper = { + consumed: false, + diagnosticNode: new DiagnosticNodeInternal(clientContext.diagnosticLevel, exports.DiagnosticNodeType.PARALLEL_QUERY_NODE, null), + }; + this.diagnosticNodeWrapper.diagnosticNode.addData({ stateful: true }); + this.err = undefined; + this.state = ParallelQueryExecutionContextBase.STATES.started; + this.routingProvider = new SmartRoutingMapProvider(this.clientContext); + this.sortOrders = this.partitionedQueryExecutionInfo.queryInfo.orderBy; + this.requestContinuation = options ? 
options.continuationToken || options.continuation : null; + // response headers of undergoing operation + this.respHeaders = getInitialHeader(); + // Make priority queue for documentProducers + // The comparator is supplied by the derived class + this.orderByPQ = new PriorityQueue__default["default"]((a, b) => this.documentProducerComparator(b, a)); + // Creating the documentProducers + this.sem = semaphore__default["default"](1); + // Creating callback for semaphore + // TODO: Code smell + const createDocumentProducersAndFillUpPriorityQueueFunc = async () => { + // ensure the lock is released after finishing up + try { + const targetPartitionRanges = await this._onTargetPartitionRanges(); + this.waitingForInternalExecutionContexts = targetPartitionRanges.length; + const maxDegreeOfParallelism = options.maxDegreeOfParallelism === undefined || options.maxDegreeOfParallelism < 1 + ? targetPartitionRanges.length + : Math.min(options.maxDegreeOfParallelism, targetPartitionRanges.length); + logger$2.info("Query starting against " + + targetPartitionRanges.length + + " ranges with parallelism of " + + maxDegreeOfParallelism); + const parallelismSem = semaphore__default["default"](maxDegreeOfParallelism); + let filteredPartitionKeyRanges = []; + // The document producers generated from filteredPartitionKeyRanges + const targetPartitionQueryExecutionContextList = []; + if (this.requestContinuation) { + throw new Error("Continuation tokens are not yet supported for cross partition queries"); + } + else { + filteredPartitionKeyRanges = targetPartitionRanges; + } + // Create one documentProducer for each partitionTargetRange + filteredPartitionKeyRanges.forEach((partitionTargetRange) => { + // TODO: any partitionTargetRange + // no async callback + targetPartitionQueryExecutionContextList.push(this._createTargetPartitionQueryExecutionContext(partitionTargetRange)); + }); + // Fill up our priority queue with documentProducers + targetPartitionQueryExecutionContextList.forEach((documentProducer) => { + // has async callback + const throttledFunc = async () => { + try { + const { result: document, headers } = await documentProducer.current(this.getDiagnosticNode()); + this._mergeWithActiveResponseHeaders(headers); + if (document === undefined) { + // no results on this one + return; + } + // if there are matching results in the target ex range add it to the priority queue + try { + this.orderByPQ.enq(documentProducer); + } + catch (e) { + this.err = e; + } + } + catch (err) { + this._mergeWithActiveResponseHeaders(err.headers); + this.err = err; + } + finally { + parallelismSem.leave(); + this._decrementInitiationLock(); + } + }; + parallelismSem.take(throttledFunc); + }); } - if (!credentials || credentials.length === 0) { - logger$1.info(`${auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); - return next(request); + catch (err) { + this.err = err; + // release the lock + this.sem.leave(); + return; } - const tokenPromises = []; - for (const credential of credentials) { - let getAccessToken = tokenCyclerMap.get(credential); - if (!getAccessToken) { - getAccessToken = createTokenCycler(credential); - tokenCyclerMap.set(credential, getAccessToken); + }; + this.sem.take(createDocumentProducersAndFillUpPriorityQueueFunc); + } + _decrementInitiationLock() { + // decrements waitingForInternalExecutionContexts + // if waitingForInternalExecutionContexts reaches 0 releases the semaphore and changes the state + this.waitingForInternalExecutionContexts = 
this.waitingForInternalExecutionContexts - 1; + if (this.waitingForInternalExecutionContexts === 0) { + this.sem.leave(); + if (this.orderByPQ.size() === 0) { + this.state = ParallelQueryExecutionContextBase.STATES.inProgress; + } + } + } + _mergeWithActiveResponseHeaders(headers) { + mergeHeaders(this.respHeaders, headers); + } + _getAndResetActiveResponseHeaders() { + const ret = this.respHeaders; + this.respHeaders = getInitialHeader(); + return ret; + } + getDiagnosticNode() { + return this.diagnosticNodeWrapper.diagnosticNode; + } + async _onTargetPartitionRanges() { + // invokes the callback when the target partition ranges are ready + const parsedRanges = this.partitionedQueryExecutionInfo.queryRanges; + const queryRanges = parsedRanges.map((item) => QueryRange.parseFromDict(item)); + return this.routingProvider.getOverlappingRanges(this.collectionLink, queryRanges, this.getDiagnosticNode()); + } + /** + * Gets the replacement ranges for a partitionkeyrange that has been split + */ + async _getReplacementPartitionKeyRanges(documentProducer) { + const partitionKeyRange = documentProducer.targetPartitionKeyRange; + // Download the new routing map + this.routingProvider = new SmartRoutingMapProvider(this.clientContext); + // Get the queryRange that relates to this partitionKeyRange + const queryRange = QueryRange.parsePartitionKeyRange(partitionKeyRange); + return this.routingProvider.getOverlappingRanges(this.collectionLink, [queryRange], this.getDiagnosticNode()); + } + // TODO: P0 Code smell - can barely tell what this is doing + /** + * Removes the current document producer from the priqueue, + * replaces that document producer with child document producers, + * then reexecutes the originFunction with the corrrected executionContext + */ + async _repairExecutionContext(diagnosticNode, originFunction) { + // TODO: any + // Get the replacement ranges + // Removing the invalid documentProducer from the orderByPQ + const parentDocumentProducer = this.orderByPQ.deq(); + try { + const replacementPartitionKeyRanges = await this._getReplacementPartitionKeyRanges(parentDocumentProducer); + const replacementDocumentProducers = []; + // Create the replacement documentProducers + replacementPartitionKeyRanges.forEach((partitionKeyRange) => { + // Create replacment document producers with the parent's continuationToken + const replacementDocumentProducer = this._createTargetPartitionQueryExecutionContext(partitionKeyRange, parentDocumentProducer.continuationToken); + replacementDocumentProducers.push(replacementDocumentProducer); + }); + // We need to check if the documentProducers even has anything left to fetch from before enqueing them + const checkAndEnqueueDocumentProducer = async (documentProducerToCheck, checkNextDocumentProducerCallback) => { + try { + const { result: afterItem } = await documentProducerToCheck.current(diagnosticNode); + if (afterItem === undefined) { + // no more results left in this document producer, so we don't enqueue it + } + else { + // Safe to put document producer back in the queue + this.orderByPQ.enq(documentProducerToCheck); + } + await checkNextDocumentProducerCallback(); } - tokenPromises.push(sendAuthorizeRequest({ - scopes: Array.isArray(scopes) ? 
scopes : [scopes], - request, - getAccessToken, - logger: logger$1, - })); + catch (err) { + this.err = err; + return; + } + }; + const checkAndEnqueueDocumentProducers = async (rdp) => { + if (rdp.length > 0) { + // We still have a replacementDocumentProducer to check + const replacementDocumentProducer = rdp.shift(); + await checkAndEnqueueDocumentProducer(replacementDocumentProducer, async () => { + await checkAndEnqueueDocumentProducers(rdp); + }); + } + else { + // reexecutes the originFunction with the corrrected executionContext + return originFunction(); + } + }; + // Invoke the recursive function to get the ball rolling + await checkAndEnqueueDocumentProducers(replacementDocumentProducers); + } + catch (err) { + this.err = err; + throw err; + } + } + static _needPartitionKeyRangeCacheRefresh(error) { + // TODO: any error + return (error.code === StatusCodes.Gone && + "substatus" in error && + error["substatus"] === SubStatusCodes.PartitionKeyRangeGone); + } + /** + * Checks to see if the executionContext needs to be repaired. + * if so it repairs the execution context and executes the ifCallback, + * else it continues with the current execution context and executes the elseCallback + */ + async _repairExecutionContextIfNeeded(diagnosticNode, ifCallback, elseCallback) { + const documentProducer = this.orderByPQ.peek(); + // Check if split happened + try { + await documentProducer.current(diagnosticNode); + elseCallback(); + } + catch (err) { + if (ParallelQueryExecutionContextBase._needPartitionKeyRangeCacheRefresh(err)) { + // Split has happened so we need to repair execution context before continueing + return addDignosticChild((childNode) => this._repairExecutionContext(childNode, ifCallback), diagnosticNode, exports.DiagnosticNodeType.QUERY_REPAIR_NODE); } - const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); - if (auxiliaryTokens.length === 0) { - logger$1.warning(`None of the auxiliary tokens are valid. ${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); - return next(request); + else { + // Something actually bad happened ... + this.err = err; + throw err; } - request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); - return next(request); - }, - }; + } + } + /** + * Fetches the next element in the ParallelQueryExecutionContextBase. 
+ */ + async nextItem(diagnosticNode) { + if (this.err) { + // if there is a prior error return error + throw this.err; + } + return new Promise((resolve, reject) => { + this.sem.take(() => { + if (!this.diagnosticNodeWrapper.consumed) { + diagnosticNode.addChildNode(this.diagnosticNodeWrapper.diagnosticNode, exports.CosmosDbDiagnosticLevel.debug, exports.MetadataLookUpType.QueryPlanLookUp); + this.diagnosticNodeWrapper.diagnosticNode = undefined; + this.diagnosticNodeWrapper.consumed = true; + } + else { + this.diagnosticNodeWrapper.diagnosticNode = diagnosticNode; + } + // NOTE: lock must be released before invoking quitting + if (this.err) { + // release the lock before invoking callback + this.sem.leave(); + // if there is a prior error return error + this.err.headers = this._getAndResetActiveResponseHeaders(); + reject(this.err); + return; + } + if (this.orderByPQ.size() === 0) { + // there is no more results + this.state = ParallelQueryExecutionContextBase.STATES.ended; + // release the lock before invoking callback + this.sem.leave(); + return resolve({ + result: undefined, + headers: this._getAndResetActiveResponseHeaders(), + }); + } + const ifCallback = () => { + // Release the semaphore to avoid deadlock + this.sem.leave(); + // Reexcute the function + return resolve(this.nextItem(diagnosticNode)); + }; + const elseCallback = async () => { + let documentProducer; + try { + documentProducer = this.orderByPQ.deq(); + } + catch (e) { + // if comparing elements of the priority queue throws exception + // set that error and return error + this.err = e; + // release the lock before invoking callback + this.sem.leave(); + this.err.headers = this._getAndResetActiveResponseHeaders(); + reject(this.err); + return; + } + let item; + let headers; + try { + const response = await documentProducer.nextItem(diagnosticNode); + item = response.result; + headers = response.headers; + this._mergeWithActiveResponseHeaders(headers); + if (item === undefined) { + // this should never happen + // because the documentProducer already has buffered an item + // assert item !== undefined + this.err = new Error(`Extracted DocumentProducer from the priority queue \ + doesn't have any buffered item!`); + // release the lock before invoking callback + this.sem.leave(); + return resolve({ + result: undefined, + headers: this._getAndResetActiveResponseHeaders(), + }); + } + } + catch (err) { + this.err = new Error(`Extracted DocumentProducer from the priority queue fails to get the \ + buffered item. Due to ${JSON.stringify(err)}`); + this.err.headers = this._getAndResetActiveResponseHeaders(); + // release the lock before invoking callback + this.sem.leave(); + reject(this.err); + return; + } + // we need to put back the document producer to the queue if it has more elements. 
+ // the lock will be released after we know document producer must be put back in the queue or not + try { + const { result: afterItem, headers: otherHeaders } = await documentProducer.current(diagnosticNode); + this._mergeWithActiveResponseHeaders(otherHeaders); + if (afterItem === undefined) { + // no more results is left in this document producer + } + else { + try { + const headItem = documentProducer.fetchResults[0]; + if (typeof headItem === "undefined") { + throw new Error("Extracted DocumentProducer from PQ is invalid state with no result!"); + } + this.orderByPQ.enq(documentProducer); + } + catch (e) { + // if comparing elements in priority queue throws exception + // set error + this.err = e; + } + } + } + catch (err) { + if (ParallelQueryExecutionContextBase._needPartitionKeyRangeCacheRefresh(err)) { + // We want the document producer enqueued + // So that later parts of the code can repair the execution context + this.orderByPQ.enq(documentProducer); + } + else { + // Something actually bad happened + this.err = err; + reject(this.err); + } + } + finally { + // release the lock before returning + this.sem.leave(); + } + // invoke the callback on the item + return resolve({ + result: item, + headers: this._getAndResetActiveResponseHeaders(), + }); + }; + this._repairExecutionContextIfNeeded(diagnosticNode, ifCallback, elseCallback).catch(reject); + }); + }); + } + /** + * Determine if there are still remaining resources to processs based on the value of the continuation + * token or the elements remaining on the current batch in the QueryIterator. + * @returns true if there is other elements to process in the ParallelQueryExecutionContextBase. + */ + hasMoreResults() { + return !(this.state === ParallelQueryExecutionContextBase.STATES.ended || this.err !== undefined); + } + /** + * Creates document producers + */ + _createTargetPartitionQueryExecutionContext(partitionKeyTargetRange, continuationToken) { + // TODO: any + // creates target partition range Query Execution Context + let rewrittenQuery = this.partitionedQueryExecutionInfo.queryInfo.rewrittenQuery; + let sqlQuerySpec; + const query = this.query; + if (typeof query === "string") { + sqlQuerySpec = { query }; + } + else { + sqlQuerySpec = query; + } + const formatPlaceHolder = "{documentdb-formattableorderbyquery-filter}"; + if (rewrittenQuery) { + sqlQuerySpec = JSON.parse(JSON.stringify(sqlQuerySpec)); + // We hardcode the formattable filter to true for now + rewrittenQuery = rewrittenQuery.replace(formatPlaceHolder, "true"); + sqlQuerySpec["query"] = rewrittenQuery; + } + const options = Object.assign({}, this.options); + options.continuationToken = continuationToken; + return new DocumentProducer(this.clientContext, this.collectionLink, sqlQuerySpec, partitionKeyTargetRange, options); + } } +ParallelQueryExecutionContextBase.STATES = ParallelQueryExecutionContextBaseStates; // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const unimplementedMethods = { - arrayBuffer: () => { - throw new Error("Not implemented"); - }, - slice: () => { - throw new Error("Not implemented"); - }, - text: () => { - throw new Error("Not implemented"); - }, -}; /** - * Create an object that implements the File interface. This object is intended to be - * passed into RequestBodyType.formData, and is not guaranteed to work as expected in - * other situations. 
- * - * Use this function to: - * - Create a File object for use in RequestBodyType.formData in environments where the - * global File object is unavailable. - * - Create a File-like object from a readable stream without reading the stream into memory. - * - * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is - * passed in a request's form data map, the stream will not be read into memory - * and instead will be streamed when the request is made. In the event of a retry, the - * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. - * @param name - the name of the file. - * @param options - optional metadata about the file, e.g. file name, file size, MIME type. + * Provides the ParallelQueryExecutionContext. + * This class is capable of handling parallelized queries and derives from ParallelQueryExecutionContextBase. + * @hidden */ -function createFileFromStream(stream, name, options = {}) { - var _a, _b, _c, _d; - return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? _d : -1, name, stream: () => toWebStream(stream()) }); +class ParallelQueryExecutionContext extends ParallelQueryExecutionContextBase { + // Instance members are inherited + // Overriding documentProducerComparator for ParallelQueryExecutionContexts + /** + * Provides a Comparator for document producers using the min value of the corresponding target partition. + * @returns Comparator Function + * @hidden + */ + documentProducerComparator(docProd1, docProd2) { + return docProd1.generation - docProd2.generation; + } } -/** - * Create an object that implements the File interface. This object is intended to be - * passed into RequestBodyType.formData, and is not guaranteed to work as expected in - * other situations. - * - * Use this function create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. - * - * @param content - the content of the file as a Uint8Array in memory. - * @param name - the name of the file. - * @param options - optional metadata about the file, e.g. file name, file size, MIME type. - */ -function createFile(content, name, options = {}) { - var _a, _b, _c; - return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream() }); + +/** @hidden */ +class OrderByQueryExecutionContext extends ParallelQueryExecutionContextBase { + /** + * Provides the OrderByQueryExecutionContext. + * This class is capable of handling orderby queries and dervives from ParallelQueryExecutionContextBase. + * + * When handling a parallelized query, it instantiates one instance of + * DocumentProcuder per target partition key range and aggregates the result of each. + * + * @param clientContext - The service endpoint to use to create the client. 
+ * @param collectionLink - The Collection Link + * @param options - Represents the feed options. + * @param partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo + * @hidden + */ + constructor(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo) { + // Calling on base class constructor + super(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo); + this.orderByComparator = new OrderByDocumentProducerComparator(this.sortOrders); + } + // Instance members are inherited + // Overriding documentProducerComparator for OrderByQueryExecutionContexts + /** + * Provides a Comparator for document producers which respects orderby sort order. + * @returns Comparator Function + * @hidden + */ + documentProducerComparator(docProd1, docProd2) { + return this.orderByComparator.compare(docProd1, docProd2); + } } -exports.RestError = RestError; -exports.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy; -exports.auxiliaryAuthenticationHeaderPolicyName = auxiliaryAuthenticationHeaderPolicyName; -exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; -exports.bearerTokenAuthenticationPolicyName = bearerTokenAuthenticationPolicyName; -exports.createDefaultHttpClient = createDefaultHttpClient; -exports.createEmptyPipeline = createEmptyPipeline; -exports.createFile = createFile; -exports.createFileFromStream = createFileFromStream; -exports.createHttpHeaders = createHttpHeaders; -exports.createPipelineFromOptions = createPipelineFromOptions; -exports.createPipelineRequest = createPipelineRequest; -exports.decompressResponsePolicy = decompressResponsePolicy; -exports.decompressResponsePolicyName = decompressResponsePolicyName; -exports.defaultRetryPolicy = defaultRetryPolicy; -exports.exponentialRetryPolicy = exponentialRetryPolicy; -exports.exponentialRetryPolicyName = exponentialRetryPolicyName; -exports.formDataPolicy = formDataPolicy; -exports.formDataPolicyName = formDataPolicyName; -exports.getDefaultProxySettings = getDefaultProxySettings; -exports.isRestError = isRestError; -exports.logPolicy = logPolicy; -exports.logPolicyName = logPolicyName; -exports.multipartPolicy = multipartPolicy; -exports.multipartPolicyName = multipartPolicyName; -exports.ndJsonPolicy = ndJsonPolicy; -exports.ndJsonPolicyName = ndJsonPolicyName; -exports.proxyPolicy = proxyPolicy; -exports.proxyPolicyName = proxyPolicyName; -exports.redirectPolicy = redirectPolicy; -exports.redirectPolicyName = redirectPolicyName; -exports.retryPolicy = retryPolicy; -exports.setClientRequestIdPolicy = setClientRequestIdPolicy; -exports.setClientRequestIdPolicyName = setClientRequestIdPolicyName; -exports.systemErrorRetryPolicy = systemErrorRetryPolicy; -exports.systemErrorRetryPolicyName = systemErrorRetryPolicyName; -exports.throttlingRetryPolicy = throttlingRetryPolicy; -exports.throttlingRetryPolicyName = throttlingRetryPolicyName; -exports.tlsPolicy = tlsPolicy; -exports.tlsPolicyName = tlsPolicyName; -exports.tracingPolicy = tracingPolicy; -exports.tracingPolicyName = tracingPolicyName; -exports.userAgentPolicy = userAgentPolicy; -exports.userAgentPolicyName = userAgentPolicyName; -//# sourceMappingURL=index.js.map +/** @hidden */ +class OffsetLimitEndpointComponent { + constructor(executionContext, offset, limit) { + this.executionContext = executionContext; + this.offset = offset; + this.limit = limit; + } + async nextItem(diagnosticNode) { + const aggregateHeaders = getInitialHeader(); + while (this.offset > 0) { + // Grab next item but 
ignore the result. We only need the headers + const { headers } = await this.executionContext.nextItem(diagnosticNode); + this.offset--; + mergeHeaders(aggregateHeaders, headers); + } + if (this.limit > 0) { + const { result, headers } = await this.executionContext.nextItem(diagnosticNode); + this.limit--; + mergeHeaders(aggregateHeaders, headers); + return { result, headers: aggregateHeaders }; + } + // If both limit and offset are 0, return nothing + return { + result: undefined, + headers: getInitialHeader(), + }; + } + hasMoreResults() { + return (this.offset > 0 || this.limit > 0) && this.executionContext.hasMoreResults(); + } +} +/** @hidden */ +class OrderByEndpointComponent { + /** + * Represents an endpoint in handling an order by query. For each processed orderby + * result it returns 'payload' item of the result + * + * @param executionContext - Underlying Execution Context + * @hidden + */ + constructor(executionContext) { + this.executionContext = executionContext; + } + /** + * Execute a provided function on the next element in the OrderByEndpointComponent. + */ + async nextItem(diagnosticNode) { + const { result: item, headers } = await this.executionContext.nextItem(diagnosticNode); + return { + result: item !== undefined ? item.payload : undefined, + headers, + }; + } + /** + * Determine if there are still remaining resources to processs. + * @returns true if there is other elements to process in the OrderByEndpointComponent. + */ + hasMoreResults() { + return this.executionContext.hasMoreResults(); + } +} -/***/ }), +// Copyright (c) Microsoft Corporation. +async function digest(str) { + const hash = crypto.createHash("sha256"); + hash.update(str, "utf8"); + return hash.digest("hex"); +} -/***/ 89045: -/***/ ((module) => { +// Copyright (c) Microsoft Corporation. +async function hashObject(object) { + const stringifiedObject = stableStringify__default["default"](object); + return digest(stringifiedObject); +} -/****************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, Symbol, Reflect, Promise, SuppressedError */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __esDecorate; -var __runInitializers; -var __propKey; -var __setFunctionName; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __classPrivateFieldIn; -var __createBinding; -var __addDisposableResource; -var __disposeResources; -(function (factory) { - var root = typeof global === "object" ? 
global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { - function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } - var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; - var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; - var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); - var _, done = false; - for (var i = decorators.length - 1; i >= 0; i--) { - var context = {}; - for (var p in contextIn) context[p] = p === "access" ? 
{} : contextIn[p]; - for (var p in contextIn.access) context.access[p] = contextIn.access[p]; - context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); - if (kind === "accessor") { - if (result === void 0) continue; - if (result === null || typeof result !== "object") throw new TypeError("Object expected"); - if (_ = accept(result.get)) descriptor.get = _; - if (_ = accept(result.set)) descriptor.set = _; - if (_ = accept(result.init)) initializers.unshift(_); - } - else if (_ = accept(result)) { - if (kind === "field") initializers.unshift(_); - else descriptor[key] = _; - } - } - if (target) Object.defineProperty(target, contextIn.name, descriptor); - done = true; - }; - - __runInitializers = function (thisArg, initializers, value) { - var useValue = arguments.length > 2; - for (var i = 0; i < initializers.length; i++) { - value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); - } - return useValue ? value : void 0; - }; - - __propKey = function (x) { - return typeof x === "symbol" ? x : "".concat(x); - }; - - __setFunctionName = function (f, name, prefix) { - if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; - return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? 
(this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - __classPrivateFieldIn = function (state, receiver) { - if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); - return typeof state === "function" ? receiver === state : state.has(receiver); - }; - - __addDisposableResource = function (env, value, async) { - if (value !== null && value !== void 0) { - if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); - var dispose; - if (async) { - if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); - dispose = value[Symbol.asyncDispose]; - } - if (dispose === void 0) { - if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); - dispose = value[Symbol.dispose]; - } - if (typeof dispose !== "function") throw new TypeError("Object not disposable."); - env.stack.push({ value: value, dispose: dispose, async: async }); - } - else if (async) { - env.stack.push({ async: true }); - } - return value; - }; - - var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { - var e = new Error(message); - return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; - }; - - __disposeResources = function (env) { - function fail(e) { - env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; - env.hasError = true; - } - function next() { - while (env.stack.length) { - var rec = env.stack.pop(); - try { - var result = rec.dispose && rec.dispose.call(rec.value); - if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); - } - catch (e) { - fail(e); - } - } - if (env.hasError) throw env.error; - } - return next(); - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__esDecorate", __esDecorate); - exporter("__runInitializers", __runInitializers); - exporter("__propKey", __propKey); - exporter("__setFunctionName", __setFunctionName); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); - exporter("__classPrivateFieldIn", __classPrivateFieldIn); - exporter("__addDisposableResource", __addDisposableResource); - exporter("__disposeResources", __disposeResources); -}); - - -/***/ }), - -/***/ 94175: -/***/ ((__unused_webpack_module, exports) 
=> { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** @internal */ -const knownContextKeys = { - span: Symbol.for("@azure/core-tracing span"), - namespace: Symbol.for("@azure/core-tracing namespace"), -}; -/** - * Creates a new {@link TracingContext} with the given options. - * @param options - A set of known keys that may be set on the context. - * @returns A new {@link TracingContext} with the given options. - * - * @internal - */ -function createTracingContext(options = {}) { - let context = new TracingContextImpl(options.parentContext); - if (options.span) { - context = context.setValue(knownContextKeys.span, options.span); +/** @hidden */ +class OrderedDistinctEndpointComponent { + constructor(executionContext) { + this.executionContext = executionContext; } - if (options.namespace) { - context = context.setValue(knownContextKeys.namespace, options.namespace); + async nextItem(diagnosticNode) { + const { headers, result } = await this.executionContext.nextItem(diagnosticNode); + if (result) { + const hashedResult = await hashObject(result); + if (hashedResult === this.hashedLastResult) { + return { result: undefined, headers }; + } + this.hashedLastResult = hashedResult; + } + return { result, headers }; } - return context; -} -/** @internal */ -class TracingContextImpl { - constructor(initialContext) { - this._contextMap = - initialContext instanceof TracingContextImpl - ? new Map(initialContext._contextMap) - : new Map(); + hasMoreResults() { + return this.executionContext.hasMoreResults(); } - setValue(key, value) { - const newContext = new TracingContextImpl(this); - newContext._contextMap.set(key, value); - return newContext; +} + +/** @hidden */ +class UnorderedDistinctEndpointComponent { + constructor(executionContext) { + this.executionContext = executionContext; + this.hashedResults = new Set(); } - getValue(key) { - return this._contextMap.get(key); + async nextItem(diagnosticNode) { + const { headers, result } = await this.executionContext.nextItem(diagnosticNode); + if (result) { + const hashedResult = await hashObject(result); + if (this.hashedResults.has(hashedResult)) { + return { result: undefined, headers }; + } + this.hashedResults.add(hashedResult); + } + return { result, headers }; } - deleteValue(key) { - const newContext = new TracingContextImpl(this); - newContext._contextMap.delete(key); - return newContext; + hasMoreResults() { + return this.executionContext.hasMoreResults(); } } // Copyright (c) Microsoft Corporation. -function createDefaultTracingSpan() { - return { - end: () => { - // noop - }, - isRecording: () => false, - recordException: () => { - // noop - }, - setAttribute: () => { - // noop - }, - setStatus: () => { - // noop - }, - }; -} -function createDefaultInstrumenter() { - return { - createRequestHeaders: () => { - return {}; - }, - parseTraceparentHeader: () => { - return undefined; - }, - startSpan: (_name, spanOptions) => { +// Licensed under the MIT license. +// All aggregates are effectively a group by operation +// The empty group is used for aggregates without a GROUP BY clause +const emptyGroup = "__empty__"; +// Newer API versions rewrite the query to return `item2`. It fixes some legacy issues with the original `item` result +// Aggregator code should use item2 when available +const extractAggregateResult = (payload) => Object.keys(payload).length > 0 ? (payload.item2 ? 
payload.item2 : payload.item) : null; + +/** @hidden */ +class GroupByEndpointComponent { + constructor(executionContext, queryInfo) { + this.executionContext = executionContext; + this.queryInfo = queryInfo; + this.groupings = new Map(); + this.aggregateResultArray = []; + this.completed = false; + } + async nextItem(diagnosticNode) { + // If we have a full result set, begin returning results + if (this.aggregateResultArray.length > 0) { return { - span: createDefaultTracingSpan(), - tracingContext: createTracingContext({ parentContext: spanOptions.tracingContext }), + result: this.aggregateResultArray.pop(), + headers: getInitialHeader(), }; - }, - withContext(_context, callback, ...callbackArgs) { - return callback(...callbackArgs); - }, - }; -} -/** @internal */ -let instrumenterImplementation; -/** - * Extends the Azure SDK with support for a given instrumenter implementation. - * - * @param instrumenter - The instrumenter implementation to use. - */ -function useInstrumenter(instrumenter) { - instrumenterImplementation = instrumenter; -} -/** - * Gets the currently set instrumenter, a No-Op instrumenter by default. - * - * @returns The currently set instrumenter - */ -function getInstrumenter() { - if (!instrumenterImplementation) { - instrumenterImplementation = createDefaultInstrumenter(); - } - return instrumenterImplementation; -} - -// Copyright (c) Microsoft Corporation. -/** - * Creates a new tracing client. - * - * @param options - Options used to configure the tracing client. - * @returns - An instance of {@link TracingClient}. - */ -function createTracingClient(options) { - const { namespace, packageName, packageVersion } = options; - function startSpan(name, operationOptions, spanOptions) { - var _a; - const startSpanResult = getInstrumenter().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName: packageName, packageVersion: packageVersion, tracingContext: (_a = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext })); - let tracingContext = startSpanResult.tracingContext; - const span = startSpanResult.span; - if (!tracingContext.getValue(knownContextKeys.namespace)) { - tracingContext = tracingContext.setValue(knownContextKeys.namespace, namespace); } - span.setAttribute("az.namespace", tracingContext.getValue(knownContextKeys.namespace)); - const updatedOptions = Object.assign({}, operationOptions, { - tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }), - }); + if (this.completed) { + return { + result: undefined, + headers: getInitialHeader(), + }; + } + const aggregateHeaders = getInitialHeader(); + while (this.executionContext.hasMoreResults()) { + // Grab the next result + const { result, headers } = (await this.executionContext.nextItem(diagnosticNode)); + mergeHeaders(aggregateHeaders, headers); + // If it exists, process it via aggregators + if (result) { + const group = result.groupByItems ? await hashObject(result.groupByItems) : emptyGroup; + const aggregators = this.groupings.get(group); + const payload = result.payload; + if (aggregators) { + // Iterator over all results in the payload + Object.keys(payload).map((key) => { + // in case the value of a group is null make sure we create a dummy payload with item2==null + const effectiveGroupByValue = payload[key] + ? 
payload[key] + : new Map().set("item2", null); + const aggregateResult = extractAggregateResult(effectiveGroupByValue); + aggregators.get(key).aggregate(aggregateResult); + }); + } + else { + // This is the first time we have seen a grouping. Setup the initial result without aggregate values + const grouping = new Map(); + this.groupings.set(group, grouping); + // Iterator over all results in the payload + Object.keys(payload).map((key) => { + const aggregateType = this.queryInfo.groupByAliasToAggregateType[key]; + // Create a new aggregator for this specific aggregate field + const aggregator = createAggregator(aggregateType); + grouping.set(key, aggregator); + if (aggregateType) { + const aggregateResult = extractAggregateResult(payload[key]); + aggregator.aggregate(aggregateResult); + } + else { + aggregator.aggregate(payload[key]); + } + }); + } + } + } + for (const grouping of this.groupings.values()) { + const groupResult = {}; + for (const [aggregateKey, aggregator] of grouping.entries()) { + groupResult[aggregateKey] = aggregator.getResult(); + } + this.aggregateResultArray.push(groupResult); + } + this.completed = true; return { - span, - updatedOptions, + result: this.aggregateResultArray.pop(), + headers: aggregateHeaders, }; } - async function withSpan(name, operationOptions, callback, spanOptions) { - const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); - try { - const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); - span.setStatus({ status: "success" }); - return result; + hasMoreResults() { + return this.executionContext.hasMoreResults() || this.aggregateResultArray.length > 0; + } +} + +/** @hidden */ +class GroupByValueEndpointComponent { + constructor(executionContext, queryInfo) { + this.executionContext = executionContext; + this.queryInfo = queryInfo; + this.aggregators = new Map(); + this.aggregateResultArray = []; + this.completed = false; + // VALUE queries will only every have a single grouping + this.aggregateType = this.queryInfo.aggregates[0]; + } + async nextItem(diagnosticNode) { + // Start returning results if we have processed a full results set + if (this.aggregateResultArray.length > 0) { + return { + result: this.aggregateResultArray.pop(), + headers: getInitialHeader(), + }; } - catch (err) { - span.setStatus({ status: "error", error: err }); - throw err; + if (this.completed) { + return { + result: undefined, + headers: getInitialHeader(), + }; } - finally { - span.end(); + const aggregateHeaders = getInitialHeader(); + while (this.executionContext.hasMoreResults()) { + // Grab the next result + const { result, headers } = (await this.executionContext.nextItem(diagnosticNode)); + mergeHeaders(aggregateHeaders, headers); + // If it exists, process it via aggregators + if (result) { + let grouping = emptyGroup; + let payload = result; + if (result.groupByItems) { + // If the query contains a GROUP BY clause, it will have a payload property and groupByItems + payload = result.payload; + grouping = await hashObject(result.groupByItems); + } + const aggregator = this.aggregators.get(grouping); + if (!aggregator) { + // This is the first time we have seen a grouping so create a new aggregator + this.aggregators.set(grouping, createAggregator(this.aggregateType)); + } + if (this.aggregateType) { + const aggregateResult = extractAggregateResult(payload[0]); + // if aggregate result is null, we need to short circuit aggregation and return undefined + if 
(aggregateResult === null) { + this.completed = true; + } + this.aggregators.get(grouping).aggregate(aggregateResult); + } + else { + // Queries with no aggregates pass the payload directly to the aggregator + // Example: SELECT VALUE c.team FROM c GROUP BY c.team + this.aggregators.get(grouping).aggregate(payload); + } + } } + // We bail early since we got an undefined result back `[{}]` + if (this.completed) { + return { + result: undefined, + headers: aggregateHeaders, + }; + } + // If no results are left in the underlying execution context, convert our aggregate results to an array + for (const aggregator of this.aggregators.values()) { + this.aggregateResultArray.push(aggregator.getResult()); + } + this.completed = true; + return { + result: this.aggregateResultArray.pop(), + headers: aggregateHeaders, + }; } - function withContext(context, callback, ...callbackArgs) { - return getInstrumenter().withContext(context, callback, ...callbackArgs); - } - /** - * Parses a traceparent header value into a span identifier. - * - * @param traceparentHeader - The traceparent header to parse. - * @returns An implementation-specific identifier for the span. - */ - function parseTraceparentHeader(traceparentHeader) { - return getInstrumenter().parseTraceparentHeader(traceparentHeader); - } - /** - * Creates a set of request headers to propagate tracing information to a backend. - * - * @param tracingContext - The context containing the span to serialize. - * @returns The set of headers to add to a request. - */ - function createRequestHeaders(tracingContext) { - return getInstrumenter().createRequestHeaders(tracingContext); + hasMoreResults() { + return this.executionContext.hasMoreResults() || this.aggregateResultArray.length > 0; } - return { - startSpan, - withSpan, - withContext, - parseTraceparentHeader, - createRequestHeaders, - }; } -exports.createTracingClient = createTracingClient; -exports.useInstrumenter = useInstrumenter; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 51333: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var abortController = __nccwpck_require__(52557); -var crypto = __nccwpck_require__(6113); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Creates an abortable promise. - * @param buildPromise - A function that takes the resolve and reject functions as parameters. - * @param options - The options for the abortable promise. - * @returns A promise that can be aborted. - */ -function createAbortablePromise(buildPromise, options) { - const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; - return new Promise((resolve, reject) => { - function rejectOnAbort() { - reject(new abortController.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); +/** @hidden */ +class PipelinedQueryExecutionContext { + constructor(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo) { + this.clientContext = clientContext; + this.collectionLink = collectionLink; + this.query = query; + this.options = options; + this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo; + this.endpoint = null; + this.pageSize = options["maxItemCount"]; + if (this.pageSize === undefined) { + this.pageSize = PipelinedQueryExecutionContext.DEFAULT_PAGE_SIZE; } - function removeListeners() { - abortSignal === null || abortSignal === void 0 ? 
void 0 : abortSignal.removeEventListener("abort", onAbort); + // Pick between parallel vs order by execution context + const sortOrders = partitionedQueryExecutionInfo.queryInfo.orderBy; + if (Array.isArray(sortOrders) && sortOrders.length > 0) { + // Need to wrap orderby execution context in endpoint component, since the data is nested as a \ + // "payload" property. + this.endpoint = new OrderByEndpointComponent(new OrderByQueryExecutionContext(this.clientContext, this.collectionLink, this.query, this.options, this.partitionedQueryExecutionInfo)); } - function onAbort() { - cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); - removeListeners(); - rejectOnAbort(); + else { + this.endpoint = new ParallelQueryExecutionContext(this.clientContext, this.collectionLink, this.query, this.options, this.partitionedQueryExecutionInfo); } - if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { - return rejectOnAbort(); + if (Object.keys(partitionedQueryExecutionInfo.queryInfo.groupByAliasToAggregateType).length > 0 || + partitionedQueryExecutionInfo.queryInfo.aggregates.length > 0 || + partitionedQueryExecutionInfo.queryInfo.groupByExpressions.length > 0) { + if (partitionedQueryExecutionInfo.queryInfo.hasSelectValue) { + this.endpoint = new GroupByValueEndpointComponent(this.endpoint, partitionedQueryExecutionInfo.queryInfo); + } + else { + this.endpoint = new GroupByEndpointComponent(this.endpoint, partitionedQueryExecutionInfo.queryInfo); + } } - try { - buildPromise((x) => { - removeListeners(); - resolve(x); - }, (x) => { - removeListeners(); - reject(x); - }); + // If top then add that to the pipeline. TOP N is effectively OFFSET 0 LIMIT N + const top = partitionedQueryExecutionInfo.queryInfo.top; + if (typeof top === "number") { + this.endpoint = new OffsetLimitEndpointComponent(this.endpoint, 0, top); } - catch (err) { - reject(err); + // If offset+limit then add that to the pipeline + const limit = partitionedQueryExecutionInfo.queryInfo.limit; + const offset = partitionedQueryExecutionInfo.queryInfo.offset; + if (typeof limit === "number" && typeof offset === "number") { + this.endpoint = new OffsetLimitEndpointComponent(this.endpoint, offset, limit); + } + // If distinct then add that to the pipeline + const distinctType = partitionedQueryExecutionInfo.queryInfo.distinctType; + if (distinctType === "Ordered") { + this.endpoint = new OrderedDistinctEndpointComponent(this.endpoint); + } + if (distinctType === "Unordered") { + this.endpoint = new UnorderedDistinctEndpointComponent(this.endpoint); } - abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); - }); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const StandardAbortMessage = "The delay was aborted."; -/** - * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. - * @param timeInMs - The number of milliseconds to be delayed. - * @param options - The options for delay - currently abort options - * @returns Promise that is resolved after timeInMs - */ -function delay(timeInMs, options) { - let token; - const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; - return createAbortablePromise((resolve) => { - token = setTimeout(resolve, timeInMs); - }, { - cleanupBeforeAbort: () => clearTimeout(token), - abortSignal, - abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? 
abortErrorMsg : StandardAbortMessage, - }); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. - */ -async function cancelablePromiseRace(abortablePromiseBuilders, options) { - var _a, _b; - const aborter = new abortController.AbortController(); - function abortHandler() { - aborter.abort(); - } - (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); - try { - return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); } - finally { - aborter.abort(); - (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler); + async nextItem(diagnosticNode) { + return this.endpoint.nextItem(diagnosticNode); } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Returns a random integer value between a lower and upper bound, - * inclusive of both bounds. - * Note that this uses Math.random and isn't secure. If you need to use - * this for any kind of security purpose, find a better source of random. - * @param min - The smallest integer value allowed. - * @param max - The largest integer value allowed. - */ -function getRandomIntegerInclusive(min, max) { - // Make sure inputs are integers. - min = Math.ceil(min); - max = Math.floor(max); - // Pick a random offset from zero to the size of the range. - // Since Math.random() can never return 1, we have to make the range one larger - // in order to be inclusive of the maximum value after we take the floor. - const offset = Math.floor(Math.random() * (max - min + 1)); - return offset + min; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Helper to determine when an input is a generic JS object. - * @returns true when input is an object type that is not null, Array, RegExp, or Date. - */ -function isObject(input) { - return (typeof input === "object" && - input !== null && - !Array.isArray(input) && - !(input instanceof RegExp) && - !(input instanceof Date)); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Typeguard for an error object shape (has name and message) - * @param e - Something caught by a catch clause. - */ -function isError(e) { - if (isObject(e)) { - const hasName = typeof e.name === "string"; - const hasMessage = typeof e.message === "string"; - return hasName && hasMessage; + // Removed callback here beacuse it wouldn't have ever worked... + hasMoreResults() { + return this.endpoint.hasMoreResults(); } - return false; -} -/** - * Given what is thought to be an error object, return the message if possible. - * If the message is missing, returns a stringified version of the input. 
- * @param e - Something thrown from a try block - * @returns The error message or a string of the input - */ -function getErrorMessage(e) { - if (isError(e)) { - return e.message; + async fetchMore(diagnosticNode) { + // if the wrapped endpoint has different implementation for fetchMore use that + // otherwise use the default implementation + if (typeof this.endpoint.fetchMore === "function") { + return this.endpoint.fetchMore(diagnosticNode); + } + else { + this.fetchBuffer = []; + this.fetchMoreRespHeaders = getInitialHeader(); + return this._fetchMoreImplementation(diagnosticNode); + } } - else { - let stringified; + async _fetchMoreImplementation(diagnosticNode) { try { - if (typeof e === "object" && e) { - stringified = JSON.stringify(e); + const { result: item, headers } = await this.endpoint.nextItem(diagnosticNode); + mergeHeaders(this.fetchMoreRespHeaders, headers); + if (item === undefined) { + // no more results + if (this.fetchBuffer.length === 0) { + return { + result: undefined, + headers: this.fetchMoreRespHeaders, + }; + } + else { + // Just give what we have + const temp = this.fetchBuffer; + this.fetchBuffer = []; + return { result: temp, headers: this.fetchMoreRespHeaders }; + } } else { - stringified = String(e); + // append the result + this.fetchBuffer.push(item); + if (this.fetchBuffer.length >= this.pageSize) { + // fetched enough results + const temp = this.fetchBuffer.slice(0, this.pageSize); + this.fetchBuffer = this.fetchBuffer.splice(this.pageSize); + return { result: temp, headers: this.fetchMoreRespHeaders }; + } + else { + // recursively fetch more + // TODO: is recursion a good idea? + return this._fetchMoreImplementation(diagnosticNode); + } } } catch (err) { - stringified = "[unable to stringify input]"; + mergeHeaders(this.fetchMoreRespHeaders, err.headers); + err.headers = this.fetchMoreRespHeaders; + if (err) { + throw err; + } } - return `Unknown error ${stringified}`; } } +PipelinedQueryExecutionContext.DEFAULT_PAGE_SIZE = 10; // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Generates a SHA-256 HMAC signature. - * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. - * @param stringToSign - The data to be signed. - * @param encoding - The textual encoding to use for the returned HMAC digest. - */ -async function computeSha256Hmac(key, stringToSign, encoding) { - const decodedKey = Buffer.from(key, "base64"); - return crypto.createHmac("sha256", decodedKey).update(stringToSign).digest(encoding); -} /** - * Generates a SHA-256 hash. - * @param content - The data to be included in the hash. - * @param encoding - The textual encoding to use for the returned hash. + * Represents a QueryIterator Object, an implementation of feed or query response that enables + * traversal and iterating over the response + * in the Azure Cosmos DB database service. */ -async function computeSha256Hash(content, encoding) { - return crypto.createHash("sha256").update(content).digest(encoding); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Helper TypeGuard that checks if something is defined or not. - * @param thing - Anything - */ -function isDefined(thing) { - return typeof thing !== "undefined" && thing !== null; -} -/** - * Helper TypeGuard that checks if the input is an object with the specified properties. - * @param thing - Anything. - * @param properties - The name of the properties that should appear in the object. 
- */ -function isObjectWithProperties(thing, properties) { - if (!isDefined(thing) || typeof thing !== "object") { - return false; +class QueryIterator { + /** + * @hidden + */ + constructor(clientContext, query, options, fetchFunctions, resourceLink, resourceType) { + this.clientContext = clientContext; + this.query = query; + this.options = options; + this.fetchFunctions = fetchFunctions; + this.resourceLink = resourceLink; + this.resourceType = resourceType; + this.query = query; + this.fetchFunctions = fetchFunctions; + this.options = options || {}; + this.resourceLink = resourceLink; + this.fetchAllLastResHeaders = getInitialHeader(); + this.reset(); + this.isInitialized = false; } - for (const property of properties) { - if (!objectHasProperty(thing, property)) { - return false; + /** + * Gets an async iterator that will yield results until completion. + * + * NOTE: AsyncIterators are a very new feature and you might need to + * use polyfils/etc. in order to use them in your code. + * + * If you're using TypeScript, you can use the following polyfill as long + * as you target ES6 or higher and are running on Node 6 or higher. + * + * ```typescript + * if (!Symbol || !Symbol.asyncIterator) { + * (Symbol as any).asyncIterator = Symbol.for("Symbol.asyncIterator"); + * } + * ``` + * + * @example Iterate over all databases + * ```typescript + * for await(const { resources: db } of client.databases.readAll().getAsyncIterator()) { + * console.log(`Got ${db} from AsyncIterator`); + * } + * ``` + */ + getAsyncIterator() { + return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() { + this.reset(); + let diagnosticNode = new DiagnosticNodeInternal(this.clientContext.diagnosticLevel, exports.DiagnosticNodeType.CLIENT_REQUEST_NODE, null); + this.queryPlanPromise = this.fetchQueryPlan(diagnosticNode); + while (this.queryExecutionContext.hasMoreResults()) { + let response; + try { + response = yield tslib.__await(this.queryExecutionContext.fetchMore(diagnosticNode)); + } + catch (error) { + if (this.needsQueryPlan(error)) { + yield tslib.__await(this.createPipelinedExecutionContext()); + try { + response = yield tslib.__await(this.queryExecutionContext.fetchMore(diagnosticNode)); + } + catch (queryError) { + this.handleSplitError(queryError); + } + } + else { + throw error; + } + } + const feedResponse = new FeedResponse(response.result, response.headers, this.queryExecutionContext.hasMoreResults(), diagnosticNode.toDiagnostic(this.clientContext.getClientConfig())); + diagnosticNode = new DiagnosticNodeInternal(this.clientContext.diagnosticLevel, exports.DiagnosticNodeType.CLIENT_REQUEST_NODE, null); + if (response.result !== undefined) { + yield yield tslib.__await(feedResponse); + } + } + }); + } + /** + * Determine if there are still remaining resources to process based on the value of the continuation token or the + * elements remaining on the current batch in the QueryIterator. + * @returns true if there is other elements to process in the QueryIterator. + */ + hasMoreResults() { + return this.queryExecutionContext.hasMoreResults(); + } + /** + * Fetch all pages for the query and return a single FeedResponse. 
+ */ + async fetchAll() { + return withDiagnostics(async (diagnosticNode) => { + return this.fetchAllInternal(diagnosticNode); + }, this.clientContext); + } + /** + * @hidden + */ + async fetchAllInternal(diagnosticNode) { + this.reset(); + let response; + try { + response = await this.toArrayImplementation(diagnosticNode); + } + catch (error) { + this.handleSplitError(error); } + return response; } - return true; -} -/** - * Helper TypeGuard that checks if the input is an object with the specified property. - * @param thing - Any object. - * @param property - The name of the property that should appear in the object. - */ -function objectHasProperty(thing, property) { - return (isDefined(thing) && typeof thing === "object" && property in thing); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/* - * NOTE: When moving this file, please update "react-native" section in package.json. - */ -/** - * Generated Universally Unique Identifier - * - * @returns RFC4122 v4 UUID. - */ -function generateUUID() { - let uuid = ""; - for (let i = 0; i < 32; i++) { - // Generate a random number between 0 and 15 - const randomNumber = Math.floor(Math.random() * 16); - // Set the UUID version to 4 in the 13th position - if (i === 12) { - uuid += "4"; + /** + * Retrieve the next batch from the feed. + * + * This may or may not fetch more pages from the backend depending on your settings + * and the type of query. Aggregate queries will generally fetch all backend pages + * before returning the first batch of responses. + */ + async fetchNext() { + return withDiagnostics(async (diagnosticNode) => { + this.queryPlanPromise = withMetadataDiagnostics(async (metadataNode) => { + return this.fetchQueryPlan(metadataNode); + }, diagnosticNode, exports.MetadataLookUpType.QueryPlanLookUp); + if (!this.isInitialized) { + await this.init(); + } + let response; + try { + response = await this.queryExecutionContext.fetchMore(diagnosticNode); + } + catch (error) { + if (this.needsQueryPlan(error)) { + await this.createPipelinedExecutionContext(); + try { + response = await this.queryExecutionContext.fetchMore(diagnosticNode); + } + catch (queryError) { + this.handleSplitError(queryError); + } + } + else { + throw error; + } + } + return new FeedResponse(response.result, response.headers, this.queryExecutionContext.hasMoreResults(), getEmptyCosmosDiagnostics()); + }, this.clientContext); + } + /** + * Reset the QueryIterator to the beginning and clear all the resources inside it + */ + reset() { + this.queryPlanPromise = undefined; + this.fetchAllLastResHeaders = getInitialHeader(); + this.fetchAllTempResources = []; + this.queryExecutionContext = new DefaultQueryExecutionContext(this.options, this.fetchFunctions); + } + async toArrayImplementation(diagnosticNode) { + this.queryPlanPromise = withMetadataDiagnostics(async (metadataNode) => { + return this.fetchQueryPlan(metadataNode); + }, diagnosticNode, exports.MetadataLookUpType.QueryPlanLookUp); + // this.queryPlanPromise = this.fetchQueryPlan(diagnosticNode); + if (!this.isInitialized) { + await this.init(); } - else if (i === 16) { - // Set the UUID variant to "10" in the 17th position - uuid += (randomNumber & 0x3) | 0x8; + while (this.queryExecutionContext.hasMoreResults()) { + let response; + try { + response = await this.queryExecutionContext.nextItem(diagnosticNode); + } + catch (error) { + if (this.needsQueryPlan(error)) { + await this.createPipelinedExecutionContext(); + response = await 
this.queryExecutionContext.nextItem(diagnosticNode); + } + else { + throw error; + } + } + const { result, headers } = response; + // concatenate the results and fetch more + mergeHeaders(this.fetchAllLastResHeaders, headers); + if (result !== undefined) { + this.fetchAllTempResources.push(result); + } + } + return new FeedResponse(this.fetchAllTempResources, this.fetchAllLastResHeaders, this.queryExecutionContext.hasMoreResults(), getEmptyCosmosDiagnostics()); + } + async createPipelinedExecutionContext() { + const queryPlanResponse = await this.queryPlanPromise; + // We always coerce queryPlanPromise to resolved. So if it errored, we need to manually inspect the resolved value + if (queryPlanResponse instanceof Error) { + throw queryPlanResponse; + } + const queryPlan = queryPlanResponse.result; + const queryInfo = queryPlan.queryInfo; + if (queryInfo.aggregates.length > 0 && queryInfo.hasSelectValue === false) { + throw new Error("Aggregate queries must use the VALUE keyword"); + } + this.queryExecutionContext = new PipelinedQueryExecutionContext(this.clientContext, this.resourceLink, this.query, this.options, queryPlan); + } + async fetchQueryPlan(diagnosticNode) { + if (!this.queryPlanPromise && this.resourceType === exports.ResourceType.item) { + return this.clientContext + .getQueryPlan(getPathFromLink(this.resourceLink) + "/docs", exports.ResourceType.item, this.resourceLink, this.query, this.options, diagnosticNode) + .catch((error) => error); // Without this catch, node reports an unhandled rejection. So we stash the promise as resolved even if it errored. + } + return this.queryPlanPromise; + } + needsQueryPlan(error) { + var _a; + if (((_a = error.body) === null || _a === void 0 ? void 0 : _a.additionalErrorInfo) || + error.message.includes("Cross partition query only supports")) { + return error.code === StatusCodes.BadRequest && this.resourceType === exports.ResourceType.item; } else { - // Add a random hexadecimal digit to the UUID string - uuid += randomNumber.toString(16); + throw error; } - // Add hyphens to the UUID string at the appropriate positions - if (i === 7 || i === 11 || i === 15 || i === 19) { - uuid += "-"; + } + async init() { + if (this.isInitialized === true) { + return; + } + if (this.initPromise === undefined) { + this.initPromise = this._init(); + } + return this.initPromise; + } + async _init() { + if (this.options.forceQueryPlan === true && this.resourceType === exports.ResourceType.item) { + await this.createPipelinedExecutionContext(); } + this.isInitialized = true; + } + handleSplitError(err) { + if (err.code === 410) { + const error = new Error("Encountered partition split and could not recover. This request is retryable"); + error.code = 503; + error.originalError = err; + throw error; + } + else { + throw err; + } + } +} + +class ConflictResponse extends ResourceResponse { + constructor(resource, headers, statusCode, conflict, diagnostics) { + super(resource, headers, statusCode, diagnostics); + this.conflict = conflict; } - return uuid; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -var _a$1; -// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. -let uuidFunction = typeof ((_a$1 = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a$1 === void 0 ? void 0 : _a$1.randomUUID) === "function" - ? 
globalThis.crypto.randomUUID.bind(globalThis.crypto) - : crypto.randomUUID; -// Not defined in earlier versions of Node.js 14 -if (!uuidFunction) { - uuidFunction = generateUUID; +async function readPartitionKeyDefinition(diagnosticNode, container) { + const partitionKeyDefinition = await container.readPartitionKeyDefinition(diagnosticNode); + return partitionKeyDefinition.resource; } + /** - * Generated Universally Unique Identifier + * Use to read or delete a given {@link Conflict} by id. * - * @returns RFC4122 v4 UUID. + * @see {@link Conflicts} to query or read all conflicts. */ -function randomUUID() { - return uuidFunction(); +class Conflict { + /** + * Returns a reference URL to the resource. Used for linking in Permissions. + */ + get url() { + return `/${this.container.url}/${Constants$1.Path.ConflictsPathSegment}/${this.id}`; + } + /** + * @hidden + * @param container - The parent {@link Container}. + * @param id - The id of the given {@link Conflict}. + */ + constructor(container, id, clientContext, partitionKey) { + this.container = container; + this.id = id; + this.clientContext = clientContext; + this.partitionKey = partitionKey; + this.partitionKey = partitionKey; + } + /** + * Read the {@link ConflictDefinition} for the given {@link Conflict}. + */ + async read(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url, exports.ResourceType.conflicts); + const id = getIdFromLink(this.url); + const response = await this.clientContext.read({ + path, + resourceType: exports.ResourceType.user, + resourceId: id, + options, + diagnosticNode, + }); + return new ConflictResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); + } + /** + * Delete the given {@link ConflictDefinition}. + */ + async delete(options) { + return withDiagnostics(async (diagnosticNode) => { + if (this.partitionKey === undefined) { + const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); + this.partitionKey = undefinedPartitionKey(partitionKeyDefinition); + } + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.conflicts, + resourceId: id, + options, + partitionKey: this.partitionKey, + diagnosticNode, + }); + return new ConflictResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); + } } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -var _a, _b, _c, _d; -/** - * A constant that indicates whether the environment the code is running is a Web Browser. - */ -// eslint-disable-next-line @azure/azure-sdk/ts-no-window -const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; -/** - * A constant that indicates whether the environment the code is running is a Web Worker. - */ -const isWebWorker = typeof self === "object" && - typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && - (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || - ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || - ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); -/** - * A constant that indicates whether the environment the code is running is Deno. 
- */ -const isDeno = typeof Deno !== "undefined" && - typeof Deno.version !== "undefined" && - typeof Deno.version.deno !== "undefined"; -/** - * A constant that indicates whether the environment the code is running is Node.JS. - */ -const isNode = typeof process !== "undefined" && - Boolean(process.version) && - Boolean((_d = process.versions) === null || _d === void 0 ? void 0 : _d.node) && - // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions - !isDeno; -/** - * A constant that indicates whether the environment the code is running is Bun.sh. - */ -const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; /** - * A constant that indicates whether the environment the code is running is in React-Native. + * Use to query or read all conflicts. + * + * @see {@link Conflict} to read or delete a given {@link Conflict} by id. */ -// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js -const isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; +class Conflicts { + constructor(container, clientContext) { + this.container = container; + this.clientContext = clientContext; + } + query(query, options) { + const path = getPathFromLink(this.container.url, exports.ResourceType.conflicts); + const id = getIdFromLink(this.container.url); + return new QueryIterator(this.clientContext, query, options, (diagNode, innerOptions) => { + return this.clientContext.queryFeed({ + path, + resourceType: exports.ResourceType.conflicts, + resourceId: id, + resultFn: (result) => result.Conflicts, + query, + options: innerOptions, + diagnosticNode: diagNode, + }); + }); + } + /** + * Reads all conflicts + * @param options - Use to set options like response page size, continuation tokens, etc. + */ + readAll(options) { + return this.query(undefined, options); + } +} // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -/** - * The helper that transforms bytes with specific character encoding into string - * @param bytes - the uint8array bytes - * @param format - the format we use to encode the byte - * @returns a string of the encoded string - */ -function uint8ArrayToString(bytes, format) { - return Buffer.from(bytes).toString(format); +exports.ConflictResolutionMode = void 0; +(function (ConflictResolutionMode) { + ConflictResolutionMode["Custom"] = "Custom"; + ConflictResolutionMode["LastWriterWins"] = "LastWriterWins"; +})(exports.ConflictResolutionMode || (exports.ConflictResolutionMode = {})); + +class ItemResponse extends ResourceResponse { + constructor(resource, headers, statusCode, subsstatusCode, item, diagnostics) { + super(resource, headers, statusCode, diagnostics, subsstatusCode); + this.item = item; + } } + /** - * The helper that transforms string to specific character encoded bytes array. - * @param value - the string to be converted - * @param format - the format we use to decode the value - * @returns a uint8array + * Used to perform operations on a specific item. + * + * @see {@link Items} for operations on all items; see `container.items`. */ -function stringToUint8Array(value, format) { - return Buffer.from(value, format); +class Item { + /** + * Returns a reference URL to the resource. Used for linking in Permissions. 
+ */ + get url() { + return createDocumentUri(this.container.database.id, this.container.id, this.id); + } + /** + * @hidden + * @param container - The parent {@link Container}. + * @param id - The id of the given {@link Item}. + * @param partitionKey - The primary key of the given {@link Item} (only for partitioned containers). + */ + constructor(container, id, clientContext, partitionKey) { + this.container = container; + this.id = id; + this.clientContext = clientContext; + this.partitionKey = + partitionKey === undefined ? undefined : convertToInternalPartitionKey(partitionKey); + } + /** + * Read the item's definition. + * + * Any provided type, T, is not necessarily enforced by the SDK. + * You may get more or less properties and it's up to your logic to enforce it. + * If the type, T, is a class, it won't pass `typeof` comparisons, because it won't have a match prototype. + * It's recommended to only use interfaces. + * + * There is no set schema for JSON items. They may contain any number of custom properties. + * + * @param options - Additional options for the request + * + * @example Using custom type for response + * ```typescript + * interface TodoItem { + * title: string; + * done: bool; + * id: string; + * } + * + * let item: TodoItem; + * ({body: item} = await item.read()); + * ``` + */ + async read(options = {}) { + return withDiagnostics(async (diagnosticNode) => { + if (this.partitionKey === undefined) { + const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); + this.partitionKey = undefinedPartitionKey(partitionKeyDefinition); + } + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + let response; + try { + response = await this.clientContext.read({ + path, + resourceType: exports.ResourceType.item, + resourceId: id, + options, + partitionKey: this.partitionKey, + diagnosticNode, + }); + } + catch (error) { + if (error.code !== StatusCodes.NotFound) { + throw error; + } + response = error; + } + return new ItemResponse(response.result, response.headers, response.code, response.substatus, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); + } + async replace(body, options = {}) { + return withDiagnostics(async (diagnosticNode) => { + if (this.partitionKey === undefined) { + const partitionKeyResponse = await readPartitionKeyDefinition(diagnosticNode, this.container); + this.partitionKey = extractPartitionKeys(body, partitionKeyResponse); + } + const err = {}; + if (!isItemResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.replace({ + body, + path, + resourceType: exports.ResourceType.item, + resourceId: id, + options, + partitionKey: this.partitionKey, + diagnosticNode, + }); + return new ItemResponse(response.result, response.headers, response.code, response.substatus, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); + } + /** + * Delete the item. + * + * Any provided type, T, is not necessarily enforced by the SDK. + * You may get more or less properties and it's up to your logic to enforce it. 
+ * + * @param options - Additional options for the request + */ + async delete(options = {}) { + return withDiagnostics(async (diagnosticNode) => { + if (this.partitionKey === undefined) { + const partitionKeyResponse = await readPartitionKeyDefinition(diagnosticNode, this.container); + this.partitionKey = undefinedPartitionKey(partitionKeyResponse); + } + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.item, + resourceId: id, + options, + partitionKey: this.partitionKey, + diagnosticNode, + }); + return new ItemResponse(response.result, response.headers, response.code, response.substatus, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); + } + /** + * Perform a JSONPatch on the item. + * + * Any provided type, T, is not necessarily enforced by the SDK. + * You may get more or less properties and it's up to your logic to enforce it. + * + * @param options - Additional options for the request + */ + async patch(body, options = {}) { + return withDiagnostics(async (diagnosticNode) => { + if (this.partitionKey === undefined) { + const partitionKeyResponse = await readPartitionKeyDefinition(diagnosticNode, this.container); + this.partitionKey = extractPartitionKeys(body, partitionKeyResponse); + } + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.patch({ + body, + path, + resourceType: exports.ResourceType.item, + resourceId: id, + options, + partitionKey: this.partitionKey, + diagnosticNode, + }); + return new ItemResponse(response.result, response.headers, response.code, response.substatus, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); + } } -exports.cancelablePromiseRace = cancelablePromiseRace; -exports.computeSha256Hash = computeSha256Hash; -exports.computeSha256Hmac = computeSha256Hmac; -exports.createAbortablePromise = createAbortablePromise; -exports.delay = delay; -exports.getErrorMessage = getErrorMessage; -exports.getRandomIntegerInclusive = getRandomIntegerInclusive; -exports.isBrowser = isBrowser; -exports.isBun = isBun; -exports.isDefined = isDefined; -exports.isDeno = isDeno; -exports.isError = isError; -exports.isNode = isNode; -exports.isObject = isObject; -exports.isObjectWithProperties = isObjectWithProperties; -exports.isReactNative = isReactNative; -exports.isWebWorker = isWebWorker; -exports.objectHasProperty = objectHasProperty; -exports.randomUUID = randomUUID; -exports.stringToUint8Array = stringToUint8Array; -exports.uint8ArrayToString = uint8ArrayToString; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 82076: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var crypto = __nccwpck_require__(6113); -var logger$5 = __nccwpck_require__(3233); -var uuid$3 = __nccwpck_require__(50378); -var tslib = __nccwpck_require__(81675); -var stableStringify = __nccwpck_require__(30969); -var PriorityQueue = __nccwpck_require__(77986); -var semaphore = __nccwpck_require__(33165); -var coreRestPipeline = __nccwpck_require__(88121); -var nodeAbortController = __nccwpck_require__(85220); -var universalUserAgent = __nccwpck_require__(45030); -var JSBI = __nccwpck_require__(51778); -var abortController = __nccwpck_require__(52557); - -function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? 
e : { 'default': e }; } - -var stableStringify__default = /*#__PURE__*/_interopDefaultLegacy(stableStringify); -var PriorityQueue__default = /*#__PURE__*/_interopDefaultLegacy(PriorityQueue); -var semaphore__default = /*#__PURE__*/_interopDefaultLegacy(semaphore); -var JSBI__default = /*#__PURE__*/_interopDefaultLegacy(JSBI); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const DEFAULT_PARTITION_KEY_PATH = "/_partitionKey"; // eslint-disable-line @typescript-eslint/prefer-as-const - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * @hidden + * A single response page from the Azure Cosmos DB Change Feed */ -const Constants$1 = { - HttpHeaders: { - Authorization: "authorization", - ETag: "etag", - MethodOverride: "X-HTTP-Method", - Slug: "Slug", - ContentType: "Content-Type", - LastModified: "Last-Modified", - ContentEncoding: "Content-Encoding", - CharacterSet: "CharacterSet", - UserAgent: "User-Agent", - IfModifiedSince: "If-Modified-Since", - IfMatch: "If-Match", - IfNoneMatch: "If-None-Match", - ContentLength: "Content-Length", - AcceptEncoding: "Accept-Encoding", - KeepAlive: "Keep-Alive", - CacheControl: "Cache-Control", - TransferEncoding: "Transfer-Encoding", - ContentLanguage: "Content-Language", - ContentLocation: "Content-Location", - ContentMd5: "Content-Md5", - ContentRange: "Content-Range", - Accept: "Accept", - AcceptCharset: "Accept-Charset", - AcceptLanguage: "Accept-Language", - IfRange: "If-Range", - IfUnmodifiedSince: "If-Unmodified-Since", - MaxForwards: "Max-Forwards", - ProxyAuthorization: "Proxy-Authorization", - AcceptRanges: "Accept-Ranges", - ProxyAuthenticate: "Proxy-Authenticate", - RetryAfter: "Retry-After", - SetCookie: "Set-Cookie", - WwwAuthenticate: "Www-Authenticate", - Origin: "Origin", - Host: "Host", - AccessControlAllowOrigin: "Access-Control-Allow-Origin", - AccessControlAllowHeaders: "Access-Control-Allow-Headers", - KeyValueEncodingFormat: "application/x-www-form-urlencoded", - WrapAssertionFormat: "wrap_assertion_format", - WrapAssertion: "wrap_assertion", - WrapScope: "wrap_scope", - SimpleToken: "SWT", - HttpDate: "date", - Prefer: "Prefer", - Location: "Location", - Referer: "referer", - A_IM: "A-IM", - // Query - Query: "x-ms-documentdb-query", - IsQuery: "x-ms-documentdb-isquery", - IsQueryPlan: "x-ms-cosmos-is-query-plan-request", - SupportedQueryFeatures: "x-ms-cosmos-supported-query-features", - QueryVersion: "x-ms-cosmos-query-version", - // Our custom Azure Cosmos DB headers - Continuation: "x-ms-continuation", - ContinuationToken: "x-ms-continuation-token", - PageSize: "x-ms-max-item-count", - ItemCount: "x-ms-item-count", - // Request sender generated. Simply echoed by backend. - ActivityId: "x-ms-activity-id", - PreTriggerInclude: "x-ms-documentdb-pre-trigger-include", - PreTriggerExclude: "x-ms-documentdb-pre-trigger-exclude", - PostTriggerInclude: "x-ms-documentdb-post-trigger-include", - PostTriggerExclude: "x-ms-documentdb-post-trigger-exclude", - IndexingDirective: "x-ms-indexing-directive", - SessionToken: "x-ms-session-token", - ConsistencyLevel: "x-ms-consistency-level", - XDate: "x-ms-date", - CollectionPartitionInfo: "x-ms-collection-partition-info", - CollectionServiceInfo: "x-ms-collection-service-info", - // Deprecated, use RetryAfterInMs instead. 
- RetryAfterInMilliseconds: "x-ms-retry-after-ms", - RetryAfterInMs: "x-ms-retry-after-ms", - IsFeedUnfiltered: "x-ms-is-feed-unfiltered", - ResourceTokenExpiry: "x-ms-documentdb-expiry-seconds", - EnableScanInQuery: "x-ms-documentdb-query-enable-scan", - EmitVerboseTracesInQuery: "x-ms-documentdb-query-emit-traces", - EnableCrossPartitionQuery: "x-ms-documentdb-query-enablecrosspartition", - ParallelizeCrossPartitionQuery: "x-ms-documentdb-query-parallelizecrosspartitionquery", - ResponseContinuationTokenLimitInKB: "x-ms-documentdb-responsecontinuationtokenlimitinkb", - // QueryMetrics - // Request header to tell backend to give you query metrics. - PopulateQueryMetrics: "x-ms-documentdb-populatequerymetrics", - // Response header that holds the serialized version of query metrics. - QueryMetrics: "x-ms-documentdb-query-metrics", - // IndexMetrics - // Request header to tell backend to give you index metrics. - PopulateIndexMetrics: "x-ms-cosmos-populateindexmetrics", - // Response header that holds the serialized version of index metrics. - IndexUtilization: "x-ms-cosmos-index-utilization", - // Version headers and values - Version: "x-ms-version", - // Owner name - OwnerFullName: "x-ms-alt-content-path", - // Owner ID used for name based request in session token. - OwnerId: "x-ms-content-path", - // Partition Key - PartitionKey: "x-ms-documentdb-partitionkey", - PartitionKeyRangeID: "x-ms-documentdb-partitionkeyrangeid", - // Epk Range headers - StartEpk: "x-ms-start-epk", - EndEpk: "x-ms-end-epk", - // Read Feed Type - ReadFeedKeyType: "x-ms-read-key-type", - // Quota Info - MaxEntityCount: "x-ms-root-entity-max-count", - CurrentEntityCount: "x-ms-root-entity-current-count", - CollectionQuotaInMb: "x-ms-collection-quota-mb", - CollectionCurrentUsageInMb: "x-ms-collection-usage-mb", - MaxMediaStorageUsageInMB: "x-ms-max-media-storage-usage-mb", - CurrentMediaStorageUsageInMB: "x-ms-media-storage-usage-mb", - RequestCharge: "x-ms-request-charge", - PopulateQuotaInfo: "x-ms-documentdb-populatequotainfo", - MaxResourceQuota: "x-ms-resource-quota", - // Offer header - OfferType: "x-ms-offer-type", - OfferThroughput: "x-ms-offer-throughput", - AutoscaleSettings: "x-ms-cosmos-offer-autopilot-settings", - // Custom RUs/minute headers - DisableRUPerMinuteUsage: "x-ms-documentdb-disable-ru-per-minute-usage", - IsRUPerMinuteUsed: "x-ms-documentdb-is-ru-per-minute-used", - OfferIsRUPerMinuteThroughputEnabled: "x-ms-offer-is-ru-per-minute-throughput-enabled", - // Index progress headers - IndexTransformationProgress: "x-ms-documentdb-collection-index-transformation-progress", - LazyIndexingProgress: "x-ms-documentdb-collection-lazy-indexing-progress", - // Upsert header - IsUpsert: "x-ms-documentdb-is-upsert", - // Sub status of the error - SubStatus: "x-ms-substatus", - // StoredProcedure related headers - EnableScriptLogging: "x-ms-documentdb-script-enable-logging", - ScriptLogResults: "x-ms-documentdb-script-log-results", - // Multi-Region Write - ALLOW_MULTIPLE_WRITES: "x-ms-cosmos-allow-tentative-writes", - // Bulk/Batch header - IsBatchRequest: "x-ms-cosmos-is-batch-request", - IsBatchAtomic: "x-ms-cosmos-batch-atomic", - BatchContinueOnError: "x-ms-cosmos-batch-continue-on-error", - // Dedicated Gateway Headers - DedicatedGatewayPerRequestCacheStaleness: "x-ms-dedicatedgateway-max-age", - // Cache Refresh header - ForceRefresh: "x-ms-force-refresh", - // Priority Based throttling header - PriorityLevel: "x-ms-cosmos-priority-level", - }, - // GlobalDB related constants - WritableLocations: 
"writableLocations", - ReadableLocations: "readableLocations", - LocationUnavailableExpirationTimeInMs: 5 * 60 * 1000, - // ServiceDocument Resource - ENABLE_MULTIPLE_WRITABLE_LOCATIONS: "enableMultipleWriteLocations", - // Background refresh time - DefaultUnavailableLocationExpirationTimeMS: 5 * 60 * 1000, - // Client generated retry count response header - ThrottleRetryCount: "x-ms-throttle-retry-count", - ThrottleRetryWaitTimeInMs: "x-ms-throttle-retry-wait-time-ms", - // Platform - CurrentVersion: "2020-07-15", - AzureNamespace: "Azure.Cosmos", - AzurePackageName: "@azure/cosmos", - SDKName: "azure-cosmos-js", - SDKVersion: "4.0.0", - // Diagnostics - CosmosDbDiagnosticLevelEnvVarName: "AZURE_COSMOSDB_DIAGNOSTICS_LEVEL", - // Bulk Operations - DefaultMaxBulkRequestBodySizeInBytes: 220201, - Quota: { - CollectionSize: "collectionSize", - }, - Path: { - Root: "/", - DatabasesPathSegment: "dbs", - CollectionsPathSegment: "colls", - UsersPathSegment: "users", - DocumentsPathSegment: "docs", - PermissionsPathSegment: "permissions", - StoredProceduresPathSegment: "sprocs", - TriggersPathSegment: "triggers", - UserDefinedFunctionsPathSegment: "udfs", - ConflictsPathSegment: "conflicts", - AttachmentsPathSegment: "attachments", - PartitionKeyRangesPathSegment: "pkranges", - SchemasPathSegment: "schemas", - OffersPathSegment: "offers", - TopologyPathSegment: "topology", - DatabaseAccountPathSegment: "databaseaccount", - }, - PartitionKeyRange: { - // Partition Key Range Constants - MinInclusive: "minInclusive", - MaxExclusive: "maxExclusive", - Id: "id", - }, - QueryRangeConstants: { - // Partition Key Range Constants - MinInclusive: "minInclusive", - MaxExclusive: "maxExclusive", - min: "min", - }, +class ChangeFeedResponse { /** - * @deprecated Use EffectivePartitionKeyConstants instead + * @internal */ - EffectiveParitionKeyConstants: { - MinimumInclusiveEffectivePartitionKey: "", - MaximumExclusiveEffectivePartitionKey: "FF", - }, - EffectivePartitionKeyConstants: { - MinimumInclusiveEffectivePartitionKey: "", - MaximumExclusiveEffectivePartitionKey: "FF", - }, -}; -/** - * @hidden - */ -exports.ResourceType = void 0; -(function (ResourceType) { - ResourceType["none"] = ""; - ResourceType["database"] = "dbs"; - ResourceType["offer"] = "offers"; - ResourceType["user"] = "users"; - ResourceType["permission"] = "permissions"; - ResourceType["container"] = "colls"; - ResourceType["conflicts"] = "conflicts"; - ResourceType["sproc"] = "sprocs"; - ResourceType["udf"] = "udfs"; - ResourceType["trigger"] = "triggers"; - ResourceType["item"] = "docs"; - ResourceType["pkranges"] = "pkranges"; - ResourceType["partitionkey"] = "partitionKey"; -})(exports.ResourceType || (exports.ResourceType = {})); -/** - * @hidden - */ -exports.HTTPMethod = void 0; -(function (HTTPMethod) { - HTTPMethod["get"] = "GET"; - HTTPMethod["patch"] = "PATCH"; - HTTPMethod["post"] = "POST"; - HTTPMethod["put"] = "PUT"; - HTTPMethod["delete"] = "DELETE"; -})(exports.HTTPMethod || (exports.HTTPMethod = {})); -/** - * @hidden - */ -exports.OperationType = void 0; -(function (OperationType) { - OperationType["Create"] = "create"; - OperationType["Replace"] = "replace"; - OperationType["Upsert"] = "upsert"; - OperationType["Delete"] = "delete"; - OperationType["Read"] = "read"; - OperationType["Query"] = "query"; - OperationType["Execute"] = "execute"; - OperationType["Batch"] = "batch"; - OperationType["Patch"] = "patch"; -})(exports.OperationType || (exports.OperationType = {})); -/** - * @hidden - */ -var CosmosKeyType; 
-(function (CosmosKeyType) { - CosmosKeyType["PrimaryMaster"] = "PRIMARY_MASTER"; - CosmosKeyType["SecondaryMaster"] = "SECONDARY_MASTER"; - CosmosKeyType["PrimaryReadOnly"] = "PRIMARY_READONLY"; - CosmosKeyType["SecondaryReadOnly"] = "SECONDARY_READONLY"; -})(CosmosKeyType || (CosmosKeyType = {})); -/** - * @hidden - */ -var CosmosContainerChildResourceKind; -(function (CosmosContainerChildResourceKind) { - CosmosContainerChildResourceKind["Item"] = "ITEM"; - CosmosContainerChildResourceKind["StoredProcedure"] = "STORED_PROCEDURE"; - CosmosContainerChildResourceKind["UserDefinedFunction"] = "USER_DEFINED_FUNCTION"; - CosmosContainerChildResourceKind["Trigger"] = "TRIGGER"; -})(CosmosContainerChildResourceKind || (CosmosContainerChildResourceKind = {})); -/** - * @hidden - */ -var PermissionScopeValues; -(function (PermissionScopeValues) { + constructor( /** - * Values which set permission Scope applicable to control plane related operations. + * Gets the items returned in the response from Azure Cosmos DB */ - PermissionScopeValues[PermissionScopeValues["ScopeAccountReadValue"] = 1] = "ScopeAccountReadValue"; - PermissionScopeValues[PermissionScopeValues["ScopeAccountListDatabasesValue"] = 2] = "ScopeAccountListDatabasesValue"; - PermissionScopeValues[PermissionScopeValues["ScopeDatabaseReadValue"] = 4] = "ScopeDatabaseReadValue"; - PermissionScopeValues[PermissionScopeValues["ScopeDatabaseReadOfferValue"] = 8] = "ScopeDatabaseReadOfferValue"; - PermissionScopeValues[PermissionScopeValues["ScopeDatabaseListContainerValue"] = 16] = "ScopeDatabaseListContainerValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReadValue"] = 32] = "ScopeContainerReadValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReadOfferValue"] = 64] = "ScopeContainerReadOfferValue"; - PermissionScopeValues[PermissionScopeValues["ScopeAccountCreateDatabasesValue"] = 1] = "ScopeAccountCreateDatabasesValue"; - PermissionScopeValues[PermissionScopeValues["ScopeAccountDeleteDatabasesValue"] = 2] = "ScopeAccountDeleteDatabasesValue"; - PermissionScopeValues[PermissionScopeValues["ScopeDatabaseDeleteValue"] = 4] = "ScopeDatabaseDeleteValue"; - PermissionScopeValues[PermissionScopeValues["ScopeDatabaseReplaceOfferValue"] = 8] = "ScopeDatabaseReplaceOfferValue"; - PermissionScopeValues[PermissionScopeValues["ScopeDatabaseCreateContainerValue"] = 16] = "ScopeDatabaseCreateContainerValue"; - PermissionScopeValues[PermissionScopeValues["ScopeDatabaseDeleteContainerValue"] = 32] = "ScopeDatabaseDeleteContainerValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceValue"] = 64] = "ScopeContainerReplaceValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteValue"] = 128] = "ScopeContainerDeleteValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceOfferValue"] = 256] = "ScopeContainerReplaceOfferValue"; - PermissionScopeValues[PermissionScopeValues["ScopeAccountReadAllAccessValue"] = 65535] = "ScopeAccountReadAllAccessValue"; - PermissionScopeValues[PermissionScopeValues["ScopeDatabaseReadAllAccessValue"] = 124] = "ScopeDatabaseReadAllAccessValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainersReadAllAccessValue"] = 96] = "ScopeContainersReadAllAccessValue"; - PermissionScopeValues[PermissionScopeValues["ScopeAccountWriteAllAccessValue"] = 65535] = "ScopeAccountWriteAllAccessValue"; - PermissionScopeValues[PermissionScopeValues["ScopeDatabaseWriteAllAccessValue"] = 508] = "ScopeDatabaseWriteAllAccessValue"; - 
PermissionScopeValues[PermissionScopeValues["ScopeContainersWriteAllAccessValue"] = 448] = "ScopeContainersWriteAllAccessValue"; + result, /** - * Values which set permission Scope applicable to data plane related operations. + * Gets the number of items returned in the response from Azure Cosmos DB */ - PermissionScopeValues[PermissionScopeValues["ScopeContainerExecuteQueriesValue"] = 1] = "ScopeContainerExecuteQueriesValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReadFeedsValue"] = 2] = "ScopeContainerReadFeedsValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReadStoredProceduresValue"] = 4] = "ScopeContainerReadStoredProceduresValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReadUserDefinedFunctionsValue"] = 8] = "ScopeContainerReadUserDefinedFunctionsValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReadTriggersValue"] = 16] = "ScopeContainerReadTriggersValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReadConflictsValue"] = 32] = "ScopeContainerReadConflictsValue"; - PermissionScopeValues[PermissionScopeValues["ScopeItemReadValue"] = 64] = "ScopeItemReadValue"; - PermissionScopeValues[PermissionScopeValues["ScopeStoredProcedureReadValue"] = 128] = "ScopeStoredProcedureReadValue"; - PermissionScopeValues[PermissionScopeValues["ScopeUserDefinedFunctionReadValue"] = 256] = "ScopeUserDefinedFunctionReadValue"; - PermissionScopeValues[PermissionScopeValues["ScopeTriggerReadValue"] = 512] = "ScopeTriggerReadValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerCreateItemsValue"] = 1] = "ScopeContainerCreateItemsValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceItemsValue"] = 2] = "ScopeContainerReplaceItemsValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerUpsertItemsValue"] = 4] = "ScopeContainerUpsertItemsValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteItemsValue"] = 8] = "ScopeContainerDeleteItemsValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerCreateStoredProceduresValue"] = 16] = "ScopeContainerCreateStoredProceduresValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceStoredProceduresValue"] = 32] = "ScopeContainerReplaceStoredProceduresValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteStoredProceduresValue"] = 64] = "ScopeContainerDeleteStoredProceduresValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerExecuteStoredProceduresValue"] = 128] = "ScopeContainerExecuteStoredProceduresValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerCreateTriggersValue"] = 256] = "ScopeContainerCreateTriggersValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceTriggersValue"] = 512] = "ScopeContainerReplaceTriggersValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteTriggersValue"] = 1024] = "ScopeContainerDeleteTriggersValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerCreateUserDefinedFunctionsValue"] = 2048] = "ScopeContainerCreateUserDefinedFunctionsValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReplaceUserDefinedFunctionsValue"] = 4096] = "ScopeContainerReplaceUserDefinedFunctionsValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteUserDefinedFunctionSValue"] = 8192] = "ScopeContainerDeleteUserDefinedFunctionSValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerDeleteCONFLICTSValue"] = 16384] = 
"ScopeContainerDeleteCONFLICTSValue"; - PermissionScopeValues[PermissionScopeValues["ScopeItemReplaceValue"] = 65536] = "ScopeItemReplaceValue"; - PermissionScopeValues[PermissionScopeValues["ScopeItemUpsertValue"] = 131072] = "ScopeItemUpsertValue"; - PermissionScopeValues[PermissionScopeValues["ScopeItemDeleteValue"] = 262144] = "ScopeItemDeleteValue"; - PermissionScopeValues[PermissionScopeValues["ScopeStoredProcedureReplaceValue"] = 1048576] = "ScopeStoredProcedureReplaceValue"; - PermissionScopeValues[PermissionScopeValues["ScopeStoredProcedureDeleteValue"] = 2097152] = "ScopeStoredProcedureDeleteValue"; - PermissionScopeValues[PermissionScopeValues["ScopeStoredProcedureExecuteValue"] = 4194304] = "ScopeStoredProcedureExecuteValue"; - PermissionScopeValues[PermissionScopeValues["ScopeUserDefinedFunctionReplaceValue"] = 8388608] = "ScopeUserDefinedFunctionReplaceValue"; - PermissionScopeValues[PermissionScopeValues["ScopeUserDefinedFunctionDeleteValue"] = 16777216] = "ScopeUserDefinedFunctionDeleteValue"; - PermissionScopeValues[PermissionScopeValues["ScopeTriggerReplaceValue"] = 33554432] = "ScopeTriggerReplaceValue"; - PermissionScopeValues[PermissionScopeValues["ScopeTriggerDeleteValue"] = 67108864] = "ScopeTriggerDeleteValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerReadAllAccessValue"] = 4294967295] = "ScopeContainerReadAllAccessValue"; - PermissionScopeValues[PermissionScopeValues["ScopeItemReadAllAccessValue"] = 65] = "ScopeItemReadAllAccessValue"; - PermissionScopeValues[PermissionScopeValues["ScopeContainerWriteAllAccessValue"] = 4294967295] = "ScopeContainerWriteAllAccessValue"; - PermissionScopeValues[PermissionScopeValues["ScopeItemWriteAllAccessValue"] = 458767] = "ScopeItemWriteAllAccessValue"; - PermissionScopeValues[PermissionScopeValues["NoneValue"] = 0] = "NoneValue"; -})(PermissionScopeValues || (PermissionScopeValues = {})); -/** - * @hidden - */ -exports.SasTokenPermissionKind = void 0; -(function (SasTokenPermissionKind) { - SasTokenPermissionKind[SasTokenPermissionKind["ContainerCreateItems"] = 1] = "ContainerCreateItems"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReplaceItems"] = 2] = "ContainerReplaceItems"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerUpsertItems"] = 4] = "ContainerUpsertItems"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteItems"] = 128] = "ContainerDeleteItems"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerExecuteQueries"] = 1] = "ContainerExecuteQueries"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadFeeds"] = 2] = "ContainerReadFeeds"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerCreateStoreProcedure"] = 16] = "ContainerCreateStoreProcedure"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadStoreProcedure"] = 4] = "ContainerReadStoreProcedure"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReplaceStoreProcedure"] = 32] = "ContainerReplaceStoreProcedure"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteStoreProcedure"] = 64] = "ContainerDeleteStoreProcedure"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerCreateTriggers"] = 256] = "ContainerCreateTriggers"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadTriggers"] = 16] = "ContainerReadTriggers"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReplaceTriggers"] = 512] = "ContainerReplaceTriggers"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteTriggers"] = 1024] = "ContainerDeleteTriggers"; 
- SasTokenPermissionKind[SasTokenPermissionKind["ContainerCreateUserDefinedFunctions"] = 2048] = "ContainerCreateUserDefinedFunctions"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadUserDefinedFunctions"] = 8] = "ContainerReadUserDefinedFunctions"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReplaceUserDefinedFunctions"] = 4096] = "ContainerReplaceUserDefinedFunctions"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteUserDefinedFunctions"] = 8192] = "ContainerDeleteUserDefinedFunctions"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerExecuteStoredProcedure"] = 128] = "ContainerExecuteStoredProcedure"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadConflicts"] = 32] = "ContainerReadConflicts"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerDeleteConflicts"] = 16384] = "ContainerDeleteConflicts"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerReadAny"] = 64] = "ContainerReadAny"; - SasTokenPermissionKind[SasTokenPermissionKind["ContainerFullAccess"] = 4294967295] = "ContainerFullAccess"; - SasTokenPermissionKind[SasTokenPermissionKind["ItemReadAny"] = 65536] = "ItemReadAny"; - SasTokenPermissionKind[SasTokenPermissionKind["ItemFullAccess"] = 65] = "ItemFullAccess"; - SasTokenPermissionKind[SasTokenPermissionKind["ItemRead"] = 64] = "ItemRead"; - SasTokenPermissionKind[SasTokenPermissionKind["ItemReplace"] = 65536] = "ItemReplace"; - SasTokenPermissionKind[SasTokenPermissionKind["ItemUpsert"] = 131072] = "ItemUpsert"; - SasTokenPermissionKind[SasTokenPermissionKind["ItemDelete"] = 262144] = "ItemDelete"; - SasTokenPermissionKind[SasTokenPermissionKind["StoreProcedureRead"] = 128] = "StoreProcedureRead"; - SasTokenPermissionKind[SasTokenPermissionKind["StoreProcedureReplace"] = 1048576] = "StoreProcedureReplace"; - SasTokenPermissionKind[SasTokenPermissionKind["StoreProcedureDelete"] = 2097152] = "StoreProcedureDelete"; - SasTokenPermissionKind[SasTokenPermissionKind["StoreProcedureExecute"] = 4194304] = "StoreProcedureExecute"; - SasTokenPermissionKind[SasTokenPermissionKind["UserDefinedFuntionRead"] = 256] = "UserDefinedFuntionRead"; - SasTokenPermissionKind[SasTokenPermissionKind["UserDefinedFuntionReplace"] = 8388608] = "UserDefinedFuntionReplace"; - SasTokenPermissionKind[SasTokenPermissionKind["UserDefinedFuntionDelete"] = 16777216] = "UserDefinedFuntionDelete"; - SasTokenPermissionKind[SasTokenPermissionKind["TriggerRead"] = 512] = "TriggerRead"; - SasTokenPermissionKind[SasTokenPermissionKind["TriggerReplace"] = 33554432] = "TriggerReplace"; - SasTokenPermissionKind[SasTokenPermissionKind["TriggerDelete"] = 67108864] = "TriggerDelete"; -})(exports.SasTokenPermissionKind || (exports.SasTokenPermissionKind = {})); - -const trimLeftSlashes = new RegExp("^[/]+"); -const trimRightSlashes = new RegExp("[/]+$"); -const illegalResourceIdCharacters = new RegExp("[/\\\\?#]"); -const illegalItemResourceIdCharacters = new RegExp("[/\\\\#]"); -/** @hidden */ -function jsonStringifyAndEscapeNonASCII(arg) { - // TODO: better way for this? Not sure. - // escapes non-ASCII characters as \uXXXX - return JSON.stringify(arg).replace(/[\u007F-\uFFFF]/g, (m) => { - return "\\u" + ("0000" + m.charCodeAt(0).toString(16)).slice(-4); - }); -} -/** - * @hidden - */ -function parseLink(resourcePath) { - if (resourcePath.length === 0) { - /* for DatabaseAccount case, both type and objectBody will be undefined. 
*/ - return { - type: undefined, - objectBody: undefined, - }; + count, + /** + * Gets the status code of the response from Azure Cosmos DB + */ + statusCode, headers, diagnostics) { + this.result = result; + this.count = count; + this.statusCode = statusCode; + this.diagnostics = diagnostics; + this.headers = Object.freeze(headers); } - if (resourcePath[resourcePath.length - 1] !== "/") { - resourcePath = resourcePath + "/"; + /** + * Gets the request charge for this request from the Azure Cosmos DB service. + */ + get requestCharge() { + const rus = this.headers[Constants$1.HttpHeaders.RequestCharge]; + return rus ? parseInt(rus, 10) : null; } - if (resourcePath[0] !== "/") { - resourcePath = "/" + resourcePath; + /** + * Gets the activity ID for the request from the Azure Cosmos DB service. + */ + get activityId() { + return this.headers[Constants$1.HttpHeaders.ActivityId]; } - /* - The path will be in the form of /[resourceType]/[resourceId]/ .... - /[resourceType]//[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]/ - or /[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]/[resourceType]/[resourceId]/ .... - /[resourceType]/[resourceId]/ - The result of split will be in the form of - [[[resourceType], [resourceId] ... ,[resourceType], [resourceId], ""] - In the first case, to extract the resourceId it will the element before last ( at length -2 ) - and the type will be before it ( at length -3 ) - In the second case, to extract the resource type it will the element before last ( at length -2 ) - */ - const pathParts = resourcePath.split("/"); - let id; - let type; - if (pathParts.length % 2 === 0) { - // request in form /[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]. - id = pathParts[pathParts.length - 2]; - type = pathParts[pathParts.length - 3]; + /** + * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service. + * + * This is equivalent to the `etag` property. + */ + get continuation() { + return this.etag; } - else { - // request in form /[resourceType]/[resourceId]/ .... /[resourceType]/. - id = pathParts[pathParts.length - 3]; - type = pathParts[pathParts.length - 2]; + /** + * Gets the session token for use in session consistency reads from the Azure Cosmos DB service. + */ + get sessionToken() { + return this.headers[Constants$1.HttpHeaders.SessionToken]; + } + /** + * Gets the entity tag associated with last transaction in the Azure Cosmos DB service, + * which can be used as If-Non-Match Access condition for ReadFeed REST request or + * `continuation` property of `ChangeFeedOptions` parameter for + * `Items.changeFeed()` + * to get feed changes since the transaction specified by this entity tag. + * + * This is equivalent to the `continuation` property. 
+ */ + get etag() { + return this.headers[Constants$1.HttpHeaders.ETag]; } - const result = { - type, - objectBody: { - id, - self: resourcePath, - }, - }; - return result; -} -/** - * @hidden - */ -function isReadRequest(operationType) { - return operationType === exports.OperationType.Read || operationType === exports.OperationType.Query; -} -/** - * @hidden - */ -function sleep(time) { - return new Promise((resolve) => { - setTimeout(() => { - resolve(); - }, time); - }); -} -/** - * @hidden - */ -function getContainerLink(link) { - return link.split("/").slice(0, 4).join("/"); -} -/** - * @hidden - */ -function prepareURL(endpoint, path) { - return trimSlashes(endpoint) + path; -} -/** - * @hidden - */ -function trimSlashes(source) { - return source.replace(trimLeftSlashes, "").replace(trimRightSlashes, ""); } + /** - * @hidden + * Provides iterator for change feed. + * + * Use `Items.changeFeed()` to get an instance of the iterator. */ -function parsePath(path) { - const pathParts = []; - let currentIndex = 0; - const throwError = () => { - throw new Error("Path " + path + " is invalid at index " + currentIndex); - }; - const getEscapedToken = () => { - const quote = path[currentIndex]; - let newIndex = ++currentIndex; - for (;;) { - newIndex = path.indexOf(quote, newIndex); - if (newIndex === -1) { - throwError(); - } - if (path[newIndex - 1] !== "\\") { - break; - } - ++newIndex; +class ChangeFeedIterator { + /** + * @internal + */ + constructor(clientContext, resourceId, resourceLink, partitionKey, changeFeedOptions) { + this.clientContext = clientContext; + this.resourceId = resourceId; + this.resourceLink = resourceLink; + this.partitionKey = partitionKey; + this.changeFeedOptions = changeFeedOptions; + // partition key XOR partition key range id + const partitionKeyValid = partitionKey !== undefined; + this.isPartitionSpecified = partitionKeyValid; + let canUseStartFromBeginning = true; + if (changeFeedOptions.continuation) { + this.nextIfNoneMatch = changeFeedOptions.continuation; + canUseStartFromBeginning = false; } - const token = path.substr(currentIndex, newIndex - currentIndex); - currentIndex = newIndex + 1; - return token; - }; - const getToken = () => { - const newIndex = path.indexOf("/", currentIndex); - let token = null; - if (newIndex === -1) { - token = path.substr(currentIndex); - currentIndex = path.length; + if (changeFeedOptions.startTime) { + // .toUTCString() is platform specific, but most platforms use RFC 1123. + // In ECMAScript 2018, this was standardized to RFC 1123. + // See for more info: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString + this.ifModifiedSince = changeFeedOptions.startTime.toUTCString(); + canUseStartFromBeginning = false; } - else { - token = path.substr(currentIndex, newIndex - currentIndex); - currentIndex = newIndex; + if (canUseStartFromBeginning && !changeFeedOptions.startFromBeginning) { + this.nextIfNoneMatch = ChangeFeedIterator.IfNoneMatchAllHeaderValue; } - token = token.trim(); - return token; - }; - while (currentIndex < path.length) { - if (path[currentIndex] !== "/") { - throwError(); + } + /** + * Gets a value indicating whether there are potentially additional results that can be retrieved. + * + * Initially returns true. This value is set based on whether the last execution returned a continuation token. + * + * @returns Boolean value representing if whether there are potentially additional results that can be retrieved. 
+ */ + get hasMoreResults() { + return this.lastStatusCode !== StatusCodes.NotModified; + } + /** + * Gets an async iterator which will yield pages of results from Azure Cosmos DB. + */ + getAsyncIterator() { + return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() { + do { + const result = yield tslib.__await(this.fetchNext()); + if (result.count > 0) { + yield yield tslib.__await(result); + } + } while (this.hasMoreResults); + }); + } + /** + * Read feed and retrieves the next page of results in Azure Cosmos DB. + */ + async fetchNext() { + return withDiagnostics(async (diagnosticNode) => { + const response = await this.getFeedResponse(diagnosticNode); + this.lastStatusCode = response.statusCode; + this.nextIfNoneMatch = response.headers[Constants$1.HttpHeaders.ETag]; + return response; + }, this.clientContext); + } + async getFeedResponse(diagnosticNode) { + if (!this.isPartitionSpecified) { + throw new Error("Container is partitioned, but no partition key or partition key range id was specified."); } - if (++currentIndex === path.length) { - break; + const feedOptions = { initialHeaders: {}, useIncrementalFeed: true }; + if (typeof this.changeFeedOptions.maxItemCount === "number") { + feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount; } - if (path[currentIndex] === '"' || path[currentIndex] === "'") { - pathParts.push(getEscapedToken()); + if (this.changeFeedOptions.sessionToken) { + feedOptions.sessionToken = this.changeFeedOptions.sessionToken; } - else { - pathParts.push(getToken()); + if (this.nextIfNoneMatch) { + feedOptions.accessCondition = { + type: Constants$1.HttpHeaders.IfNoneMatch, + condition: this.nextIfNoneMatch, + }; } + if (this.ifModifiedSince) { + feedOptions.initialHeaders[Constants$1.HttpHeaders.IfModifiedSince] = this.ifModifiedSince; + } + const response = await this.clientContext.queryFeed({ + path: this.resourceLink, + resourceType: exports.ResourceType.item, + resourceId: this.resourceId, + resultFn: (result) => (result ? result.Documents : []), + query: undefined, + options: feedOptions, + partitionKey: this.partitionKey, + diagnosticNode: diagnosticNode, + }); // TODO: some funky issues with query feed. Probably need to change it up. + return new ChangeFeedResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers, getEmptyCosmosDiagnostics()); } - return pathParts; } -/** - * @hidden - */ -function isResourceValid(resource, err) { - // TODO: fix strictness issues so that caller contexts respects the types of the functions - if (resource.id) { - if (typeof resource.id !== "string") { - err.message = "Id must be a string."; - return false; - } - if (resource.id.indexOf("/") !== -1 || - resource.id.indexOf("\\") !== -1 || - resource.id.indexOf("?") !== -1 || - resource.id.indexOf("#") !== -1) { - err.message = "Id contains illegal chars."; - return false; - } - if (resource.id[resource.id.length - 1] === " ") { - err.message = "Id ends with a space."; - return false; +ChangeFeedIterator.IfNoneMatchAllHeaderValue = "*"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+const BytePrefix = { + Undefined: "00", + Null: "01", + False: "02", + True: "03", + MinNumber: "04", + Number: "05", + MaxNumber: "06", + MinString: "07", + String: "08", + MaxString: "09", + Int64: "0a", + Int32: "0b", + Int16: "0c", + Int8: "0d", + Uint64: "0e", + Uint32: "0f", + Uint16: "10", + Uint8: "11", + Binary: "12", + Guid: "13", + Float: "14", + Infinity: "FF", +}; + +// Copyright (c) Microsoft Corporation. +function writeNumberForBinaryEncodingJSBI(hash) { + let payload = encodeNumberAsUInt64JSBI(hash); + let outputStream = Buffer.from(BytePrefix.Number, "hex"); + const firstChunk = JSBI__default["default"].asUintN(64, JSBI__default["default"].signedRightShift(payload, JSBI__default["default"].BigInt(56))); + outputStream = Buffer.concat([outputStream, Buffer.from(firstChunk.toString(16), "hex")]); + payload = JSBI__default["default"].asUintN(64, JSBI__default["default"].leftShift(JSBI__default["default"].BigInt(payload), JSBI__default["default"].BigInt(0x8))); + let byteToWrite = JSBI__default["default"].BigInt(0); + let shifted; + let padded; + do { + { + // we pad because after shifting because we will produce characters like "f" or similar, + // which cannot be encoded as hex in a buffer because they are invalid hex + // https://github.com/nodejs/node/issues/24491 + padded = byteToWrite.toString(16).padStart(2, "0"); + if (padded !== "00") { + outputStream = Buffer.concat([outputStream, Buffer.from(padded, "hex")]); + } } + shifted = JSBI__default["default"].asUintN(64, JSBI__default["default"].signedRightShift(payload, JSBI__default["default"].BigInt(56))); + byteToWrite = JSBI__default["default"].asUintN(64, JSBI__default["default"].bitwiseOr(shifted, JSBI__default["default"].BigInt(0x01))); + payload = JSBI__default["default"].asUintN(64, JSBI__default["default"].leftShift(payload, JSBI__default["default"].BigInt(7))); + } while (JSBI__default["default"].notEqual(payload, JSBI__default["default"].BigInt(0))); + const lastChunk = JSBI__default["default"].asUintN(64, JSBI__default["default"].bitwiseAnd(byteToWrite, JSBI__default["default"].BigInt(0xfe))); + // we pad because after shifting because we will produce characters like "f" or similar, + // which cannot be encoded as hex in a buffer because they are invalid hex + // https://github.com/nodejs/node/issues/24491 + padded = lastChunk.toString(16).padStart(2, "0"); + if (padded !== "00") { + outputStream = Buffer.concat([outputStream, Buffer.from(padded, "hex")]); } - return true; + return outputStream; } -/** - * @hidden - */ -function isItemResourceValid(resource, err) { - // TODO: fix strictness issues so that caller contexts respects the types of the functions - if (resource.id) { - if (typeof resource.id !== "string") { - err.message = "Id must be a string."; - return false; - } - if (resource.id.indexOf("/") !== -1 || - resource.id.indexOf("\\") !== -1 || - resource.id.indexOf("#") !== -1) { - err.message = "Id contains illegal chars."; - return false; - } +function encodeNumberAsUInt64JSBI(value) { + const rawValueBits = getRawBitsJSBI(value); + const mask = JSBI__default["default"].BigInt(0x8000000000000000); + const returned = rawValueBits < mask + ? 
JSBI__default["default"].bitwiseXor(rawValueBits, mask) + : JSBI__default["default"].add(JSBI__default["default"].bitwiseNot(rawValueBits), JSBI__default["default"].BigInt(1)); + return returned; +} +function doubleToByteArrayJSBI(double) { + const output = Buffer.alloc(8); + const lng = getRawBitsJSBI(double); + for (let i = 0; i < 8; i++) { + output[i] = JSBI__default["default"].toNumber(JSBI__default["default"].bitwiseAnd(JSBI__default["default"].signedRightShift(lng, JSBI__default["default"].multiply(JSBI__default["default"].BigInt(i), JSBI__default["default"].BigInt(8))), JSBI__default["default"].BigInt(0xff))); } - return true; + return output; } -/** @hidden */ -function getIdFromLink(resourceLink) { - resourceLink = trimSlashes(resourceLink); - return resourceLink; +function getRawBitsJSBI(value) { + const view = new DataView(new ArrayBuffer(8)); + view.setFloat64(0, value); + return JSBI__default["default"].BigInt(`0x${buf2hex(view.buffer)}`); } -/** @hidden */ -function getPathFromLink(resourceLink, resourceType) { - resourceLink = trimSlashes(resourceLink); - if (resourceType) { - return "/" + encodeURI(resourceLink) + "/" + resourceType; +function buf2hex(buffer) { + return Array.prototype.map + .call(new Uint8Array(buffer), (x) => ("00" + x.toString(16)).slice(-2)) + .join(""); +} + +// +----------------------------------------------------------------------+ +// | murmurHash3js.js v3.0.1 // https://github.com/pid/murmurHash3js +// | A javascript implementation of MurmurHash3's x86 hashing algorithms. | +// |----------------------------------------------------------------------| +// | Copyright (c) 2012-2015 Karan Lyons | +// | https://github.com/karanlyons/murmurHash3.js/blob/c1778f75792abef7bdd74bc85d2d4e1a3d25cfe9/murmurHash3.js | +// | Freely distributable under the MIT license. | +// +----------------------------------------------------------------------+ +// PRIVATE FUNCTIONS +// ----------------- +function _x86Multiply(m, n) { + // + // Given two 32bit ints, returns the two multiplied together as a + // 32bit int. + // + return (m & 0xffff) * n + ((((m >>> 16) * n) & 0xffff) << 16); +} +function _x86Rotl(m, n) { + // + // Given a 32bit int and an int representing a number of bit positions, + // returns the 32bit int rotated left by that number of positions. + // + return (m << n) | (m >>> (32 - n)); +} +function _x86Fmix(h) { + // + // Given a block, returns murmurHash3's final x86 mix of that block. + // + h ^= h >>> 16; + h = _x86Multiply(h, 0x85ebca6b); + h ^= h >>> 13; + h = _x86Multiply(h, 0xc2b2ae35); + h ^= h >>> 16; + return h; +} +function _x64Add(m, n) { + // + // Given two 64bit ints (as an array of two 32bit ints) returns the two + // added together as a 64bit int (as an array of two 32bit ints). + // + m = [m[0] >>> 16, m[0] & 0xffff, m[1] >>> 16, m[1] & 0xffff]; + n = [n[0] >>> 16, n[0] & 0xffff, n[1] >>> 16, n[1] & 0xffff]; + const o = [0, 0, 0, 0]; + o[3] += m[3] + n[3]; + o[2] += o[3] >>> 16; + o[3] &= 0xffff; + o[2] += m[2] + n[2]; + o[1] += o[2] >>> 16; + o[2] &= 0xffff; + o[1] += m[1] + n[1]; + o[0] += o[1] >>> 16; + o[1] &= 0xffff; + o[0] += m[0] + n[0]; + o[0] &= 0xffff; + return [(o[0] << 16) | o[1], (o[2] << 16) | o[3]]; +} +function _x64Multiply(m, n) { + // + // Given two 64bit ints (as an array of two 32bit ints) returns the two + // multiplied together as a 64bit int (as an array of two 32bit ints). 
+ // + m = [m[0] >>> 16, m[0] & 0xffff, m[1] >>> 16, m[1] & 0xffff]; + n = [n[0] >>> 16, n[0] & 0xffff, n[1] >>> 16, n[1] & 0xffff]; + const o = [0, 0, 0, 0]; + o[3] += m[3] * n[3]; + o[2] += o[3] >>> 16; + o[3] &= 0xffff; + o[2] += m[2] * n[3]; + o[1] += o[2] >>> 16; + o[2] &= 0xffff; + o[2] += m[3] * n[2]; + o[1] += o[2] >>> 16; + o[2] &= 0xffff; + o[1] += m[1] * n[3]; + o[0] += o[1] >>> 16; + o[1] &= 0xffff; + o[1] += m[2] * n[2]; + o[0] += o[1] >>> 16; + o[1] &= 0xffff; + o[1] += m[3] * n[1]; + o[0] += o[1] >>> 16; + o[1] &= 0xffff; + o[0] += m[0] * n[3] + m[1] * n[2] + m[2] * n[1] + m[3] * n[0]; + o[0] &= 0xffff; + return [(o[0] << 16) | o[1], (o[2] << 16) | o[3]]; +} +function _x64Rotl(m, n) { + // + // Given a 64bit int (as an array of two 32bit ints) and an int + // representing a number of bit positions, returns the 64bit int (as an + // array of two 32bit ints) rotated left by that number of positions. + // + n %= 64; + if (n === 32) { + return [m[1], m[0]]; + } + else if (n < 32) { + return [(m[0] << n) | (m[1] >>> (32 - n)), (m[1] << n) | (m[0] >>> (32 - n))]; } else { - return "/" + encodeURI(resourceLink); + n -= 32; + return [(m[1] << n) | (m[0] >>> (32 - n)), (m[0] << n) | (m[1] >>> (32 - n))]; } } -/** - * @hidden - */ -function isStringNullOrEmpty(inputString) { - // checks whether string is null, undefined, empty or only contains space - return !inputString || /^\s*$/.test(inputString); +function _x64LeftShift(m, n) { + // + // Given a 64bit int (as an array of two 32bit ints) and an int + // representing a number of bit positions, returns the 64bit int (as an + // array of two 32bit ints) shifted left by that number of positions. + // + n %= 64; + if (n === 0) { + return m; + } + else if (n < 32) { + return [(m[0] << n) | (m[1] >>> (32 - n)), m[1] << n]; + } + else { + return [m[1] << (n - 32), 0]; + } } -/** - * @hidden - */ -function trimSlashFromLeftAndRight(inputString) { - if (typeof inputString !== "string") { - throw new Error("invalid input: input is not string"); +function _x64Xor(m, n) { + // + // Given two 64bit ints (as an array of two 32bit ints) returns the two + // xored together as a 64bit int (as an array of two 32bit ints). + // + return [m[0] ^ n[0], m[1] ^ n[1]]; +} +function _x64Fmix(h) { + // + // Given a block, returns murmurHash3's final x64 mix of that block. + // (`[0, h[0] >>> 1]` is a 33 bit unsigned right shift. This is the + // only place where we need to right shift 64bit ints.) + // + h = _x64Xor(h, [0, h[0] >>> 1]); + h = _x64Multiply(h, [0xff51afd7, 0xed558ccd]); + h = _x64Xor(h, [0, h[0] >>> 1]); + h = _x64Multiply(h, [0xc4ceb9fe, 0x1a85ec53]); + h = _x64Xor(h, [0, h[0] >>> 1]); + return h; +} +// PUBLIC FUNCTIONS +// ---------------- +function x86Hash32(bytes, seed) { + // + // Given a string and an optional seed as an int, returns a 32 bit hash + // using the x86 flavor of MurmurHash3, as an unsigned int. 
+ // + seed = seed || 0; + const remainder = bytes.length % 4; + const blocks = bytes.length - remainder; + let h1 = seed; + let k1 = 0; + const c1 = 0xcc9e2d51; + const c2 = 0x1b873593; + let j = 0; + for (let i = 0; i < blocks; i = i + 4) { + k1 = bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24); + k1 = _x86Multiply(k1, c1); + k1 = _x86Rotl(k1, 15); + k1 = _x86Multiply(k1, c2); + h1 ^= k1; + h1 = _x86Rotl(h1, 13); + h1 = _x86Multiply(h1, 5) + 0xe6546b64; + j = i + 4; } - return inputString.replace(trimLeftSlashes, "").replace(trimRightSlashes, ""); + k1 = 0; + switch (remainder) { + case 3: + k1 ^= bytes[j + 2] << 16; + case 2: + k1 ^= bytes[j + 1] << 8; + case 1: + k1 ^= bytes[j]; + k1 = _x86Multiply(k1, c1); + k1 = _x86Rotl(k1, 15); + k1 = _x86Multiply(k1, c2); + h1 ^= k1; + } + h1 ^= bytes.length; + h1 = _x86Fmix(h1); + return h1 >>> 0; } -/** - * @hidden - */ -function validateResourceId(resourceId) { - // if resourceId is not a string or is empty throw an error - if (typeof resourceId !== "string" || isStringNullOrEmpty(resourceId)) { - throw new Error("Resource ID must be a string and cannot be undefined, null or empty"); +function x86Hash128(bytes, seed) { + // + // Given a string and an optional seed as an int, returns a 128 bit + // hash using the x86 flavor of MurmurHash3, as an unsigned hex. + // + seed = seed || 0; + const remainder = bytes.length % 16; + const blocks = bytes.length - remainder; + let h1 = seed; + let h2 = seed; + let h3 = seed; + let h4 = seed; + let k1 = 0; + let k2 = 0; + let k3 = 0; + let k4 = 0; + const c1 = 0x239b961b; + const c2 = 0xab0e9789; + const c3 = 0x38b34ae5; + const c4 = 0xa1e38b93; + let j = 0; + for (let i = 0; i < blocks; i = i + 16) { + k1 = bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24); + k2 = bytes[i + 4] | (bytes[i + 5] << 8) | (bytes[i + 6] << 16) | (bytes[i + 7] << 24); + k3 = bytes[i + 8] | (bytes[i + 9] << 8) | (bytes[i + 10] << 16) | (bytes[i + 11] << 24); + k4 = bytes[i + 12] | (bytes[i + 13] << 8) | (bytes[i + 14] << 16) | (bytes[i + 15] << 24); + k1 = _x86Multiply(k1, c1); + k1 = _x86Rotl(k1, 15); + k1 = _x86Multiply(k1, c2); + h1 ^= k1; + h1 = _x86Rotl(h1, 19); + h1 += h2; + h1 = _x86Multiply(h1, 5) + 0x561ccd1b; + k2 = _x86Multiply(k2, c2); + k2 = _x86Rotl(k2, 16); + k2 = _x86Multiply(k2, c3); + h2 ^= k2; + h2 = _x86Rotl(h2, 17); + h2 += h3; + h2 = _x86Multiply(h2, 5) + 0x0bcaa747; + k3 = _x86Multiply(k3, c3); + k3 = _x86Rotl(k3, 17); + k3 = _x86Multiply(k3, c4); + h3 ^= k3; + h3 = _x86Rotl(h3, 15); + h3 += h4; + h3 = _x86Multiply(h3, 5) + 0x96cd1c35; + k4 = _x86Multiply(k4, c4); + k4 = _x86Rotl(k4, 18); + k4 = _x86Multiply(k4, c1); + h4 ^= k4; + h4 = _x86Rotl(h4, 13); + h4 += h1; + h4 = _x86Multiply(h4, 5) + 0x32ac3b17; + j = i + 16; } - // if resource id contains illegal characters throw an error - if (illegalResourceIdCharacters.test(resourceId)) { - throw new Error("Illegal characters ['/', '\\', '#', '?'] cannot be used in Resource ID"); + k1 = 0; + k2 = 0; + k3 = 0; + k4 = 0; + switch (remainder) { + case 15: + k4 ^= bytes[j + 14] << 16; + case 14: + k4 ^= bytes[j + 13] << 8; + case 13: + k4 ^= bytes[j + 12]; + k4 = _x86Multiply(k4, c4); + k4 = _x86Rotl(k4, 18); + k4 = _x86Multiply(k4, c1); + h4 ^= k4; + case 12: + k3 ^= bytes[j + 11] << 24; + case 11: + k3 ^= bytes[j + 10] << 16; + case 10: + k3 ^= bytes[j + 9] << 8; + case 9: + k3 ^= bytes[j + 8]; + k3 = _x86Multiply(k3, c3); + k3 = _x86Rotl(k3, 17); + k3 = _x86Multiply(k3, c4); + h3 ^= k3; + case 8: + k2 ^= 
bytes[j + 7] << 24; + case 7: + k2 ^= bytes[j + 6] << 16; + case 6: + k2 ^= bytes[j + 5] << 8; + case 5: + k2 ^= bytes[j + 4]; + k2 = _x86Multiply(k2, c2); + k2 = _x86Rotl(k2, 16); + k2 = _x86Multiply(k2, c3); + h2 ^= k2; + case 4: + k1 ^= bytes[j + 3] << 24; + case 3: + k1 ^= bytes[j + 2] << 16; + case 2: + k1 ^= bytes[j + 1] << 8; + case 1: + k1 ^= bytes[j]; + k1 = _x86Multiply(k1, c1); + k1 = _x86Rotl(k1, 15); + k1 = _x86Multiply(k1, c2); + h1 ^= k1; } - return true; + h1 ^= bytes.length; + h2 ^= bytes.length; + h3 ^= bytes.length; + h4 ^= bytes.length; + h1 += h2; + h1 += h3; + h1 += h4; + h2 += h1; + h3 += h1; + h4 += h1; + h1 = _x86Fmix(h1); + h2 = _x86Fmix(h2); + h3 = _x86Fmix(h3); + h4 = _x86Fmix(h4); + h1 += h2; + h1 += h3; + h1 += h4; + h2 += h1; + h3 += h1; + h4 += h1; + return (("00000000" + (h1 >>> 0).toString(16)).slice(-8) + + ("00000000" + (h2 >>> 0).toString(16)).slice(-8) + + ("00000000" + (h3 >>> 0).toString(16)).slice(-8) + + ("00000000" + (h4 >>> 0).toString(16)).slice(-8)); } -/** - * @hidden - */ -function validateItemResourceId(resourceId) { - // if resourceId is not a string or is empty throw an error - if (typeof resourceId !== "string" || isStringNullOrEmpty(resourceId)) { - throw new Error("Resource ID must be a string and cannot be undefined, null or empty"); +function x64Hash128(bytes, seed) { + // + // Given a string and an optional seed as an int, returns a 128 bit + // hash using the x64 flavor of MurmurHash3, as an unsigned hex. + // + seed = seed || 0; + const remainder = bytes.length % 16; + const blocks = bytes.length - remainder; + let h1 = [0, seed]; + let h2 = [0, seed]; + let k1 = [0, 0]; + let k2 = [0, 0]; + const c1 = [0x87c37b91, 0x114253d5]; + const c2 = [0x4cf5ad43, 0x2745937f]; + let j = 0; + for (let i = 0; i < blocks; i = i + 16) { + k1 = [ + bytes[i + 4] | (bytes[i + 5] << 8) | (bytes[i + 6] << 16) | (bytes[i + 7] << 24), + bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24), + ]; + k2 = [ + bytes[i + 12] | (bytes[i + 13] << 8) | (bytes[i + 14] << 16) | (bytes[i + 15] << 24), + bytes[i + 8] | (bytes[i + 9] << 8) | (bytes[i + 10] << 16) | (bytes[i + 11] << 24), + ]; + k1 = _x64Multiply(k1, c1); + k1 = _x64Rotl(k1, 31); + k1 = _x64Multiply(k1, c2); + h1 = _x64Xor(h1, k1); + h1 = _x64Rotl(h1, 27); + h1 = _x64Add(h1, h2); + h1 = _x64Add(_x64Multiply(h1, [0, 5]), [0, 0x52dce729]); + k2 = _x64Multiply(k2, c2); + k2 = _x64Rotl(k2, 33); + k2 = _x64Multiply(k2, c1); + h2 = _x64Xor(h2, k2); + h2 = _x64Rotl(h2, 31); + h2 = _x64Add(h2, h1); + h2 = _x64Add(_x64Multiply(h2, [0, 5]), [0, 0x38495ab5]); + j = i + 16; } - // if resource id contains illegal characters throw an error - if (illegalItemResourceIdCharacters.test(resourceId)) { - throw new Error("Illegal characters ['/', '\\', '#'] cannot be used in Resource ID"); + k1 = [0, 0]; + k2 = [0, 0]; + switch (remainder) { + case 15: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 14]], 48)); + case 14: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 13]], 40)); + case 13: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 12]], 32)); + case 12: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 11]], 24)); + case 11: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 10]], 16)); + case 10: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 9]], 8)); + case 9: + k2 = _x64Xor(k2, [0, bytes[j + 8]]); + k2 = _x64Multiply(k2, c2); + k2 = _x64Rotl(k2, 33); + k2 = _x64Multiply(k2, c1); + h2 = _x64Xor(h2, k2); + case 8: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 7]], 56)); + case 7: + k1 = 
_x64Xor(k1, _x64LeftShift([0, bytes[j + 6]], 48)); + case 6: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 5]], 40)); + case 5: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 4]], 32)); + case 4: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 3]], 24)); + case 3: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 2]], 16)); + case 2: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 1]], 8)); + case 1: + k1 = _x64Xor(k1, [0, bytes[j]]); + k1 = _x64Multiply(k1, c1); + k1 = _x64Rotl(k1, 31); + k1 = _x64Multiply(k1, c2); + h1 = _x64Xor(h1, k1); } - return true; + h1 = _x64Xor(h1, [0, bytes.length]); + h2 = _x64Xor(h2, [0, bytes.length]); + h1 = _x64Add(h1, h2); + h2 = _x64Add(h2, h1); + h1 = _x64Fmix(h1); + h2 = _x64Fmix(h2); + h1 = _x64Add(h1, h2); + h2 = _x64Add(h2, h1); + // Here we reverse h1 and h2 in Cosmos + // This is an implementation detail and not part of the public spec + const h1Buff = Buffer.from(("00000000" + (h1[0] >>> 0).toString(16)).slice(-8) + + ("00000000" + (h1[1] >>> 0).toString(16)).slice(-8), "hex"); + const h1Reversed = reverse$1(h1Buff).toString("hex"); + const h2Buff = Buffer.from(("00000000" + (h2[0] >>> 0).toString(16)).slice(-8) + + ("00000000" + (h2[1] >>> 0).toString(16)).slice(-8), "hex"); + const h2Reversed = reverse$1(h2Buff).toString("hex"); + return h1Reversed + h2Reversed; } -/** - * @hidden - */ -function getResourceIdFromPath(resourcePath) { - if (!resourcePath || typeof resourcePath !== "string") { - return null; +function reverse$1(buff) { + const buffer = Buffer.allocUnsafe(buff.length); + for (let i = 0, j = buff.length - 1; i <= j; ++i, --j) { + buffer[i] = buff[j]; + buffer[j] = buff[i]; } - const trimmedPath = trimSlashFromLeftAndRight(resourcePath); - const pathSegments = trimmedPath.split("/"); - // number of segments of a path must always be even - if (pathSegments.length % 2 !== 0) { - return null; + return buffer; +} +var MurmurHash = { + version: "3.0.0", + x86: { + hash32: x86Hash32, + hash128: x86Hash128, + }, + x64: { + hash128: x64Hash128, + }, + inputValidation: true, +}; + +// Copyright (c) Microsoft Corporation. +function hashV2PartitionKey(partitionKey) { + const toHash = Buffer.concat(partitionKey.map(prefixKeyByType$1)); + const hash = MurmurHash.x64.hash128(toHash); + const reverseBuff = reverse(Buffer.from(hash, "hex")); + reverseBuff[0] &= 0x3f; + return reverseBuff.toString("hex").toUpperCase(); +} +function prefixKeyByType$1(key) { + let bytes; + switch (typeof key) { + case "string": { + bytes = Buffer.concat([ + Buffer.from(BytePrefix.String, "hex"), + Buffer.from(key), + Buffer.from(BytePrefix.Infinity, "hex"), + ]); + return bytes; + } + case "number": { + const numberBytes = doubleToByteArrayJSBI(key); + bytes = Buffer.concat([Buffer.from(BytePrefix.Number, "hex"), numberBytes]); + return bytes; + } + case "boolean": { + const prefix = key ? BytePrefix.True : BytePrefix.False; + return Buffer.from(prefix, "hex"); + } + case "object": { + if (key === null) { + return Buffer.from(BytePrefix.Null, "hex"); + } + return Buffer.from(BytePrefix.Undefined, "hex"); + } + case "undefined": { + return Buffer.from(BytePrefix.Undefined, "hex"); + } + default: + throw new Error(`Unexpected type: ${typeof key}`); } - return pathSegments[pathSegments.length - 1]; } +function reverse(buff) { + const buffer = Buffer.allocUnsafe(buff.length); + for (let i = 0, j = buff.length - 1; i <= j; ++i, --j) { + buffer[i] = buff[j]; + buffer[j] = buff[i]; + } + return buffer; +} + /** - * @hidden + * Generate Hash for a `Multi Hash` type partition. 
+ * @param partitionKey - to be hashed. + * @returns */ -function parseConnectionString(connectionString) { - const keyValueStrings = connectionString.split(";"); - const { AccountEndpoint, AccountKey } = keyValueStrings.reduce((connectionObject, keyValueString) => { - const [key, ...value] = keyValueString.split("="); - connectionObject[key] = value.join("="); - return connectionObject; - }, {}); - if (!AccountEndpoint || !AccountKey) { - throw new Error("Could not parse the provided connection string"); +function hashMultiHashPartitionKey(partitionKey) { + return partitionKey.map((keys) => hashV2PartitionKey([keys])).join(""); +} + +// Copyright (c) Microsoft Corporation. +function writeStringForBinaryEncoding(payload) { + let outputStream = Buffer.from(BytePrefix.String, "hex"); + const MAX_STRING_BYTES_TO_APPEND = 100; + const byteArray = [...Buffer.from(payload)]; + const isShortString = payload.length <= MAX_STRING_BYTES_TO_APPEND; + for (let index = 0; index < (isShortString ? byteArray.length : MAX_STRING_BYTES_TO_APPEND + 1); index++) { + let charByte = byteArray[index]; + if (charByte < 0xff) { + charByte++; + } + outputStream = Buffer.concat([outputStream, Buffer.from(charByte.toString(16), "hex")]); } - return { - endpoint: AccountEndpoint, - key: AccountKey, - }; + if (isShortString) { + outputStream = Buffer.concat([outputStream, Buffer.from(BytePrefix.Undefined, "hex")]); + } + return outputStream; } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * @hidden - */ -const StatusCodes = { - // Success - Ok: 200, - Created: 201, - Accepted: 202, - NoContent: 204, - NotModified: 304, - // Client error - BadRequest: 400, - Unauthorized: 401, - Forbidden: 403, - NotFound: 404, - MethodNotAllowed: 405, - RequestTimeout: 408, - Conflict: 409, - Gone: 410, - PreconditionFailed: 412, - RequestEntityTooLarge: 413, - TooManyRequests: 429, - RetryWith: 449, - // Server Error - InternalServerError: 500, - ServiceUnavailable: 503, - // System codes - ENOTFOUND: "ENOTFOUND", - // Operation pause and cancel. These are FAKE status codes for QOS logging purpose only. - OperationPaused: 1200, - OperationCancelled: 1201, -}; -/** - * @hidden - */ -const SubStatusCodes = { - Unknown: 0, - // 400: Bad Request Substatus - CrossPartitionQueryNotServable: 1004, - // 410: StatusCodeType_Gone: substatus - PartitionKeyRangeGone: 1002, - CompletingSplit: 1007, - // 404: NotFound Substatus - ReadSessionNotAvailable: 1002, - // 403: Forbidden Substatus - WriteForbidden: 3, - DatabaseAccountNotFound: 1008, -}; - -// Copyright (c) Microsoft Corporation. -/** - * Would be used when creating or deleting a DocumentCollection - * or a User in Azure Cosmos DB database service - * @hidden - * Given a database id, this creates a database link. - * @param databaseId - The database id - * @returns A database link in the format of `dbs/{0}` - * with `{0}` being a Uri escaped version of the databaseId - */ -function createDatabaseUri(databaseId) { - databaseId = trimSlashFromLeftAndRight(databaseId); - validateResourceId(databaseId); - return Constants$1.Path.DatabasesPathSegment + "/" + databaseId; -} -/** - * Given a database and collection id, this creates a collection link. - * Would be used when updating or deleting a DocumentCollection, creating a - * Document, a StoredProcedure, a Trigger, a UserDefinedFunction, or when executing a query - * with CreateDocumentQuery in Azure Cosmos DB database service. 
- * @param databaseId - The database id - * @param collectionId - The collection id - * @returns A collection link in the format of `dbs/{0}/colls/{1}` - * with `{0}` being a Uri escaped version of the databaseId and `{1}` being collectionId - * @hidden - */ -function createDocumentCollectionUri(databaseId, collectionId) { - collectionId = trimSlashFromLeftAndRight(collectionId); - validateResourceId(collectionId); - return (createDatabaseUri(databaseId) + "/" + Constants$1.Path.CollectionsPathSegment + "/" + collectionId); -} -/** - * Given a database and user id, this creates a user link. - * Would be used when creating a Permission, or when replacing or deleting - * a User in Azure Cosmos DB database service - * @param databaseId - The database id - * @param userId - The user id - * @returns A user link in the format of `dbs/{0}/users/{1}` - * with `{0}` being a Uri escaped version of the databaseId and `{1}` being userId - * @hidden - */ -function createUserUri(databaseId, userId) { - userId = trimSlashFromLeftAndRight(userId); - validateResourceId(userId); - return createDatabaseUri(databaseId) + "/" + Constants$1.Path.UsersPathSegment + "/" + userId; -} -/** - * Given a database and collection id, this creates a collection link. - * Would be used when creating an Attachment, or when replacing - * or deleting a Document in Azure Cosmos DB database service - * @param databaseId - The database id - * @param collectionId - The collection id - * @param documentId - The document id - * @returns A document link in the format of - * `dbs/{0}/colls/{1}/docs/{2}` with `{0}` being a Uri escaped version of - * the databaseId, `{1}` being collectionId and `{2}` being the documentId - * @hidden - */ -function createDocumentUri(databaseId, collectionId, documentId) { - documentId = trimSlashFromLeftAndRight(documentId); - validateItemResourceId(documentId); - return (createDocumentCollectionUri(databaseId, collectionId) + - "/" + - Constants$1.Path.DocumentsPathSegment + - "/" + - documentId); -} -/** - * Given a database, collection and document id, this creates a document link. - * Would be used when replacing or deleting a Permission in Azure Cosmos DB database service. - * @param databaseId -The database Id - * @param userId -The user Id - * @param permissionId - The permissionId - * @returns A permission link in the format of `dbs/{0}/users/{1}/permissions/{2}` - * with `{0}` being a Uri escaped version of the databaseId, `{1}` being userId and `{2}` being permissionId - * @hidden - */ -function createPermissionUri(databaseId, userId, permissionId) { - permissionId = trimSlashFromLeftAndRight(permissionId); - validateResourceId(permissionId); - return (createUserUri(databaseId, userId) + - "/" + - Constants$1.Path.PermissionsPathSegment + - "/" + - permissionId); -} -/** - * Given a database, collection and stored proc id, this creates a stored proc link. - * Would be used when replacing, executing, or deleting a StoredProcedure in - * Azure Cosmos DB database service. 
- * @param databaseId -The database Id - * @param collectionId -The collection Id - * @param storedProcedureId -The stored procedure Id - * @returns A stored procedure link in the format of - * `dbs/{0}/colls/{1}/sprocs/{2}` with `{0}` being a Uri escaped version of the databaseId, - * `{1}` being collectionId and `{2}` being the storedProcedureId - * @hidden - */ -function createStoredProcedureUri(databaseId, collectionId, storedProcedureId) { - storedProcedureId = trimSlashFromLeftAndRight(storedProcedureId); - validateResourceId(storedProcedureId); - return (createDocumentCollectionUri(databaseId, collectionId) + - "/" + - Constants$1.Path.StoredProceduresPathSegment + - "/" + - storedProcedureId); -} -/** - * Given a database, collection and trigger id, this creates a trigger link. - * Would be used when replacing, executing, or deleting a Trigger in Azure Cosmos DB database service - * @param databaseId -The database Id - * @param collectionId -The collection Id - * @param triggerId -The trigger Id - * @returns A trigger link in the format of - * `dbs/{0}/colls/{1}/triggers/{2}` with `{0}` being a Uri escaped version of the databaseId, - * `{1}` being collectionId and `{2}` being the triggerId - * @hidden - */ -function createTriggerUri(databaseId, collectionId, triggerId) { - triggerId = trimSlashFromLeftAndRight(triggerId); - validateResourceId(triggerId); - return (createDocumentCollectionUri(databaseId, collectionId) + - "/" + - Constants$1.Path.TriggersPathSegment + - "/" + - triggerId); -} -/** - * Given a database, collection and udf id, this creates a udf link. - * Would be used when replacing, executing, or deleting a UserDefinedFunction in - * Azure Cosmos DB database service - * @param databaseId -The database Id - * @param collectionId -The collection Id - * @param udfId -The User Defined Function Id - * @returns A udf link in the format of `dbs/{0}/colls/{1}/udfs/{2}` - * with `{0}` being a Uri escaped version of the databaseId, `{1}` being collectionId and `{2}` being the udfId - * @hidden - */ -function createUserDefinedFunctionUri(databaseId, collectionId, udfId) { - udfId = trimSlashFromLeftAndRight(udfId); - validateResourceId(udfId); - return (createDocumentCollectionUri(databaseId, collectionId) + - "/" + - Constants$1.Path.UserDefinedFunctionsPathSegment + - "/" + - udfId); -} - -// Copyright (c) Microsoft Corporation. -async function hmac(key, message) { - return crypto.createHmac("sha256", Buffer.from(key, "base64")).update(message).digest("base64"); -} - -// Copyright (c) Microsoft Corporation. 
-async function generateHeaders(masterKey, method, resourceType = exports.ResourceType.none, resourceId = "", date = new Date()) { - if (masterKey.startsWith("type=sas&")) { - return { - [Constants$1.HttpHeaders.Authorization]: encodeURIComponent(masterKey), - [Constants$1.HttpHeaders.XDate]: date.toUTCString(), - }; - } - const sig = await signature(masterKey, method, resourceType, resourceId, date); - return { - [Constants$1.HttpHeaders.Authorization]: sig, - [Constants$1.HttpHeaders.XDate]: date.toUTCString(), - }; -} -async function signature(masterKey, method, resourceType, resourceId = "", date = new Date()) { - const type = "master"; - const version = "1.0"; - const text = method.toLowerCase() + - "\n" + - resourceType.toLowerCase() + - "\n" + - resourceId + - "\n" + - date.toUTCString().toLowerCase() + - "\n" + - "" + - "\n"; - const signed = await hmac(masterKey, text); - return encodeURIComponent("type=" + type + "&ver=" + version + "&sig=" + signed); +const MAX_STRING_CHARS = 100; +function hashV1PartitionKey(partitionKey) { + const key = partitionKey[0]; + const toHash = prefixKeyByType(key); + const hash = MurmurHash.x86.hash32(toHash); + const encodedJSBI = writeNumberForBinaryEncodingJSBI(hash); + const encodedValue = encodeByType(key); + const finalHash = Buffer.concat([encodedJSBI, encodedValue]).toString("hex").toUpperCase(); + return finalHash; } - -// Copyright (c) Microsoft Corporation. -/** - * @hidden - */ -async function setAuthorizationHeader(clientOptions, verb, path, resourceId, resourceType, headers) { - if (clientOptions.permissionFeed) { - clientOptions.resourceTokens = {}; - for (const permission of clientOptions.permissionFeed) { - const id = getResourceIdFromPath(permission.resource); - if (!id) { - throw new Error(`authorization error: ${id} \ - is an invalid resourceId in permissionFeed`); +function prefixKeyByType(key) { + let bytes; + switch (typeof key) { + case "string": { + const truncated = key.substr(0, MAX_STRING_CHARS); + bytes = Buffer.concat([ + Buffer.from(BytePrefix.String, "hex"), + Buffer.from(truncated), + Buffer.from(BytePrefix.Undefined, "hex"), + ]); + return bytes; + } + case "number": { + const numberBytes = doubleToByteArrayJSBI(key); + bytes = Buffer.concat([Buffer.from(BytePrefix.Number, "hex"), numberBytes]); + return bytes; + } + case "boolean": { + const prefix = key ? 
BytePrefix.True : BytePrefix.False; + return Buffer.from(prefix, "hex"); + } + case "object": { + if (key === null) { + return Buffer.from(BytePrefix.Null, "hex"); } - clientOptions.resourceTokens[id] = permission._token; // TODO: any + return Buffer.from(BytePrefix.Undefined, "hex"); } + case "undefined": { + return Buffer.from(BytePrefix.Undefined, "hex"); + } + default: + throw new Error(`Unexpected type: ${typeof key}`); } - if (clientOptions.key) { - await setAuthorizationTokenHeaderUsingMasterKey(verb, resourceId, resourceType, headers, clientOptions.key); - } - else if (clientOptions.resourceTokens) { - headers[Constants$1.HttpHeaders.Authorization] = encodeURIComponent(getAuthorizationTokenUsingResourceTokens(clientOptions.resourceTokens, path, resourceId)); - } - else if (clientOptions.tokenProvider) { - headers[Constants$1.HttpHeaders.Authorization] = encodeURIComponent(await clientOptions.tokenProvider({ verb, path, resourceId, resourceType, headers })); - } -} -/** - * The default function for setting header token using the masterKey - * @hidden - */ -async function setAuthorizationTokenHeaderUsingMasterKey(verb, resourceId, resourceType, headers, masterKey) { - // TODO This should live in cosmos-sign - if (resourceType === exports.ResourceType.offer) { - resourceId = resourceId && resourceId.toLowerCase(); - } - headers = Object.assign(headers, await generateHeaders(masterKey, verb, resourceType, resourceId)); } -/** - * @hidden - */ -// TODO: Resource tokens -function getAuthorizationTokenUsingResourceTokens(resourceTokens, path, resourceId) { - if (resourceTokens && Object.keys(resourceTokens).length > 0) { - // For database account access(through getDatabaseAccount API), path and resourceId are "", - // so in this case we return the first token to be used for creating the auth header as the - // service will accept any token in this case - if (!path && !resourceId) { - return resourceTokens[Object.keys(resourceTokens)[0]]; - } - // If we have exact resource token for the path use it - if (resourceId && resourceTokens[resourceId]) { - return resourceTokens[resourceId]; +function encodeByType(key) { + switch (typeof key) { + case "string": { + const truncated = key.substr(0, MAX_STRING_CHARS); + return writeStringForBinaryEncoding(truncated); } - // minimum valid path /dbs - if (!path || path.length < 4) { - // TODO: This should throw an error - return null; + case "number": { + const encodedJSBI = writeNumberForBinaryEncodingJSBI(key); + return encodedJSBI; } - path = trimSlashFromLeftAndRight(path); - const pathSegments = (path && path.split("/")) || []; - // Item path - if (pathSegments.length === 6) { - // Look for a container token matching the item path - const containerPath = pathSegments.slice(0, 4).map(decodeURIComponent).join("/"); - if (resourceTokens[containerPath]) { - return resourceTokens[containerPath]; - } + case "boolean": { + const prefix = key ? BytePrefix.True : BytePrefix.False; + return Buffer.from(prefix, "hex"); } - // TODO remove in v4: This is legacy behavior that lets someone use a resource token pointing ONLY at an ID - // It was used when _rid was exposed by the SDK, but now that we are using user provided ids it is not needed - // However removing it now would be a breaking change - // if it's an incomplete path like /dbs/db1/colls/, start from the parent resource - let index = pathSegments.length % 2 === 0 ? 
pathSegments.length - 1 : pathSegments.length - 2; - for (; index > 0; index -= 2) { - const id = decodeURI(pathSegments[index]); - if (resourceTokens[id]) { - return resourceTokens[id]; + case "object": + if (key === null) { + return Buffer.from(BytePrefix.Null, "hex"); } - } + return Buffer.from(BytePrefix.Undefined, "hex"); + case "undefined": + return Buffer.from(BytePrefix.Undefined, "hex"); + default: + throw new Error(`Unexpected type: ${typeof key}`); } - // TODO: This should throw an error - return null; } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** Determines the connection behavior of the CosmosClient. Note, we currently only support Gateway Mode. */ -exports.ConnectionMode = void 0; -(function (ConnectionMode) { - /** Gateway mode talks to an intermediate gateway which handles the direct communication with your individual partitions. */ - ConnectionMode[ConnectionMode["Gateway"] = 0] = "Gateway"; -})(exports.ConnectionMode || (exports.ConnectionMode = {})); - /** - * @hidden + * Generate hash of a PartitonKey based on it PartitionKeyDefinition. + * @param partitionKey - to be hashed. + * @param partitionDefinition - container's partitionKey definition + * @returns */ -const defaultConnectionPolicy = Object.freeze({ - connectionMode: exports.ConnectionMode.Gateway, - requestTimeout: 60000, - enableEndpointDiscovery: true, - preferredLocations: [], - retryOptions: { - maxRetryAttemptCount: 9, - fixedRetryIntervalInMilliseconds: 0, - maxWaitTimeInSeconds: 30, - }, - useMultipleWriteLocations: true, - endpointRefreshRateInMs: 300000, - enableBackgroundEndpointRefreshing: true, -}); +function hashPartitionKey(partitionKey, partitionDefinition) { + const kind = (partitionDefinition === null || partitionDefinition === void 0 ? void 0 : partitionDefinition.kind) || exports.PartitionKeyKind.Hash; // Default value. + const isV2 = partitionDefinition && + partitionDefinition.version && + partitionDefinition.version === exports.PartitionKeyDefinitionVersion.V2; + switch (kind) { + case exports.PartitionKeyKind.Hash: + return isV2 ? hashV2PartitionKey(partitionKey) : hashV1PartitionKey(partitionKey); + case exports.PartitionKeyKind.MultiHash: + return hashMultiHashPartitionKey(partitionKey); + } +} // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Represents the consistency levels supported for Azure Cosmos DB client operations.
- * The requested ConsistencyLevel must match or be weaker than that provisioned for the database account. - * Consistency levels. - * - * Consistency levels by order of strength are Strong, BoundedStaleness, Session, Consistent Prefix, and Eventual. - * - * See https://aka.ms/cosmos-consistency for more detailed documentation on Consistency Levels. + * @internal + * FeedRange for which change feed is being requested. */ -exports.ConsistencyLevel = void 0; -(function (ConsistencyLevel) { +class ChangeFeedRange { + constructor(minInclusive, maxExclusive, continuationToken, epkMinHeader, epkMaxHeader) { + this.minInclusive = minInclusive; + this.maxExclusive = maxExclusive; + this.continuationToken = continuationToken; + this.epkMinHeader = epkMinHeader; + this.epkMaxHeader = epkMaxHeader; + } +} + +/** + * A single response page from the Azure Cosmos DB Change Feed + */ +class ChangeFeedIteratorResponse { /** - * Strong Consistency guarantees that read operations always return the value that was last written. + * @internal */ - ConsistencyLevel["Strong"] = "Strong"; + constructor( /** - * Bounded Staleness guarantees that reads are not too out-of-date. - * This can be configured based on number of operations (MaxStalenessPrefix) or time (MaxStalenessIntervalInSeconds). + * Gets the items returned in the response from Azure Cosmos DB */ - ConsistencyLevel["BoundedStaleness"] = "BoundedStaleness"; + result, /** - * Session Consistency guarantees monotonic reads (you never read old data, then new, then old again), - * monotonic writes (writes are ordered) and read your writes (your writes are immediately visible to your reads) - * within any single session. + * Gets the number of items returned in the response from Azure Cosmos DB */ - ConsistencyLevel["Session"] = "Session"; + count, /** - * Eventual Consistency guarantees that reads will return a subset of writes. - * All writes will be eventually be available for reads. + * Gets the status code of the response from Azure Cosmos DB */ - ConsistencyLevel["Eventual"] = "Eventual"; + statusCode, /** - * ConsistentPrefix Consistency guarantees that reads will return some prefix of all writes with no gaps. - * All writes will be eventually be available for reads. + * Headers related to cosmos DB and change feed. */ - ConsistencyLevel["ConsistentPrefix"] = "ConsistentPrefix"; -})(exports.ConsistencyLevel || (exports.ConsistencyLevel = {})); - -// Copyright (c) Microsoft Corporation. -/** - * Represents a DatabaseAccount in the Azure Cosmos DB database service. - */ -class DatabaseAccount { + headers, /** - * The self-link for Databases in the databaseAccount. - * @deprecated Use `databasesLink` + * Cosmos Diagnostic Object. */ - get DatabasesLink() { - return this.databasesLink; - } + diagnostics, /** - * The self-link for Media in the databaseAccount. - * @deprecated Use `mediaLink` + * Gets the subStatusCodes of the response from Azure Cosmos DB. Useful in partition split or partition gone. */ - get MediaLink() { - return this.mediaLink; + subStatusCode) { + this.result = result; + this.count = count; + this.statusCode = statusCode; + this.diagnostics = diagnostics; + this.subStatusCode = subStatusCode; + this.headers = headers; } /** - * Attachment content (media) storage quota in MBs ( Retrieved from gateway ). - * @deprecated use `maxMediaStorageUsageInMB` + * Gets the request charge for this request from the Azure Cosmos DB service. 
*/ - get MaxMediaStorageUsageInMB() { - return this.maxMediaStorageUsageInMB; + get requestCharge() { + const rus = this.headers[Constants$1.HttpHeaders.RequestCharge]; + return rus ? parseInt(rus, 10) : null; } /** - * Current attachment content (media) usage in MBs (Retrieved from gateway ) - * - * Value is returned from cached information updated periodically and is not guaranteed - * to be real time. - * - * @deprecated use `currentMediaStorageUsageInMB` + * Gets the activity ID for the request from the Azure Cosmos DB service. */ - get CurrentMediaStorageUsageInMB() { - return this.currentMediaStorageUsageInMB; + get activityId() { + return this.headers[Constants$1.HttpHeaders.ActivityId]; } /** - * Gets the UserConsistencyPolicy settings. - * @deprecated use `consistencyPolicy` + * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service. */ - get ConsistencyPolicy() { - return this.consistencyPolicy; - } - // TODO: body - any - constructor(body, headers) { - /** The list of writable locations for a geo-replicated database account. */ - this.writableLocations = []; - /** The list of readable locations for a geo-replicated database account. */ - this.readableLocations = []; - this.databasesLink = "/dbs/"; - this.mediaLink = "/media/"; - this.maxMediaStorageUsageInMB = headers[Constants$1.HttpHeaders.MaxMediaStorageUsageInMB]; - this.currentMediaStorageUsageInMB = headers[Constants$1.HttpHeaders.CurrentMediaStorageUsageInMB]; - this.consistencyPolicy = body.userConsistencyPolicy - ? body.userConsistencyPolicy.defaultConsistencyLevel - : exports.ConsistencyLevel.Session; - if (body[Constants$1.WritableLocations] && body.id !== "localhost") { - this.writableLocations = body[Constants$1.WritableLocations]; - } - if (body[Constants$1.ReadableLocations] && body.id !== "localhost") { - this.readableLocations = body[Constants$1.ReadableLocations]; - } - if (body[Constants$1.ENABLE_MULTIPLE_WRITABLE_LOCATIONS]) { - this.enableMultipleWritableLocations = - body[Constants$1.ENABLE_MULTIPLE_WRITABLE_LOCATIONS] === true || - body[Constants$1.ENABLE_MULTIPLE_WRITABLE_LOCATIONS] === "true"; - } + get continuationToken() { + return this.headers[Constants$1.HttpHeaders.ContinuationToken]; } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** Defines a target data type of an index path specification in the Azure Cosmos DB service. */ -exports.DataType = void 0; -(function (DataType) { - /** Represents a numeric data type. */ - DataType["Number"] = "Number"; - /** Represents a string data type. */ - DataType["String"] = "String"; - /** Represents a point data type. */ - DataType["Point"] = "Point"; - /** Represents a line string data type. */ - DataType["LineString"] = "LineString"; - /** Represents a polygon data type. */ - DataType["Polygon"] = "Polygon"; - /** Represents a multi-polygon data type. */ - DataType["MultiPolygon"] = "MultiPolygon"; -})(exports.DataType || (exports.DataType = {})); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Specifies the supported indexing modes. - */ -exports.IndexingMode = void 0; -(function (IndexingMode) { - /** - * Index is updated synchronously with a create or update operation. - * - * With consistent indexing, query behavior is the same as the default consistency level for the container. - * The index is always kept up to date with the data. 
- */ - IndexingMode["consistent"] = "consistent"; - /** - * Index is updated asynchronously with respect to a create or update operation. - * - * With lazy indexing, queries are eventually consistent. The index is updated when the container is idle. - */ - IndexingMode["lazy"] = "lazy"; - /** No Index is provided. */ - IndexingMode["none"] = "none"; -})(exports.IndexingMode || (exports.IndexingMode = {})); - -/* The target data type of a spatial path */ -exports.SpatialType = void 0; -(function (SpatialType) { - SpatialType["LineString"] = "LineString"; - SpatialType["MultiPolygon"] = "MultiPolygon"; - SpatialType["Point"] = "Point"; - SpatialType["Polygon"] = "Polygon"; -})(exports.SpatialType || (exports.SpatialType = {})); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Specifies the supported Index types. - */ -exports.IndexKind = void 0; -(function (IndexKind) { - /** - * This is supplied for a path which requires sorting. - */ - IndexKind["Range"] = "Range"; /** - * This is supplied for a path which requires geospatial indexing. + * Gets the session token for use in session consistency reads from the Azure Cosmos DB service. */ - IndexKind["Spatial"] = "Spatial"; -})(exports.IndexKind || (exports.IndexKind = {})); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * @hidden - * None PartitionKey Literal - */ -const NonePartitionKeyLiteral = {}; -/** - * @hidden - * Null PartitionKey Literal - */ -const NullPartitionKeyLiteral = null; -/** - * @hidden - * Maps PartitionKey to InternalPartitionKey. - * @param partitionKey - PartitonKey to be converted. - * @returns PartitionKeyInternal - */ -function convertToInternalPartitionKey(partitionKey) { - if (Array.isArray(partitionKey)) { - return partitionKey.map((key) => (key === undefined ? NonePartitionKeyLiteral : key)); + get sessionToken() { + return this.headers[Constants$1.HttpHeaders.SessionToken]; } - else - return [partitionKey]; } // Copyright (c) Microsoft Corporation. /** - * Builder class for building PartitionKey. + * @hidden + * A queue for iterating over specified Epk ranges and fetch change feed for the given epk ranges. */ -class PartitionKeyBuilder { +class FeedRangeQueue { constructor() { - this.values = []; + this.elements = []; } - addValue(value) { - this.values.push(value); - return this; + modifyFirstElement(newItem) { + if (!this.isEmpty()) { + this.elements[0] = newItem; + } } - addNullValue() { - this.values.push(NullPartitionKeyLiteral); - return this; + enqueue(item) { + this.elements.push(item); } - addNoneValue() { - this.values.push(NonePartitionKeyLiteral); - return this; + dequeue() { + return this.elements.shift(); } - build() { - return [...this.values]; + peek() { + return !this.isEmpty() ? this.elements[0] : undefined; + } + isEmpty() { + return this.elements.length === 0; + } + moveFirstElementToTheEnd() { + if (!this.isEmpty()) { + this.elements.push(this.dequeue()); + } + } + /** + * Returns a snapshot of the queue as an array to be used as Continuation token. + */ + returnSnapshot() { + const allFeedRanges = []; + this.elements.map((element) => { + const minInclusive = element.epkMinHeader ? element.epkMinHeader : element.minInclusive; + const maxExclusive = element.epkMaxHeader ? 
element.epkMaxHeader : element.maxExclusive; + const feedRangeElement = new ChangeFeedRange(minInclusive, maxExclusive, element.continuationToken); + allFeedRanges.push(feedRangeElement); + }); + return allFeedRanges; } } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * PartitionKey Definition Version - */ -exports.PartitionKeyDefinitionVersion = void 0; -(function (PartitionKeyDefinitionVersion) { - PartitionKeyDefinitionVersion[PartitionKeyDefinitionVersion["V1"] = 1] = "V1"; - PartitionKeyDefinitionVersion[PartitionKeyDefinitionVersion["V2"] = 2] = "V2"; -})(exports.PartitionKeyDefinitionVersion || (exports.PartitionKeyDefinitionVersion = {})); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * Type of PartitionKey i.e. Hash, MultiHash + * Continuation token for change feed of entire container, or a specific Epk Range. + * @internal */ -exports.PartitionKeyKind = void 0; -(function (PartitionKeyKind) { - PartitionKeyKind["Hash"] = "Hash"; - PartitionKeyKind["MultiHash"] = "MultiHash"; -})(exports.PartitionKeyKind || (exports.PartitionKeyKind = {})); +class CompositeContinuationToken { + constructor(rid, Continuation) { + this.rid = rid; + this.Continuation = Continuation; + } +} -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * Enum for permission mode values. + * @hidden + * Class which specifies the ChangeFeedIterator to start reading changes from beginning of time. */ -exports.PermissionMode = void 0; -(function (PermissionMode) { - /** Permission not valid. */ - PermissionMode["None"] = "none"; - /** Permission applicable for read operations only. */ - PermissionMode["Read"] = "read"; - /** Permission applicable for all operations. */ - PermissionMode["All"] = "all"; -})(exports.PermissionMode || (exports.PermissionMode = {})); +class ChangeFeedStartFromBeginning { + constructor(cfResource) { + this.cfResource = cfResource; + } + getCfResource() { + return this.cfResource; + } +} // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Represents Priority Level associated with each Azure Cosmos DB client requests.
- * The Low priority requests are always throttled before any High priority requests. - * - * By default all requests are considered as High priority requests. - * - * See https://aka.ms/CosmosDB/PriorityBasedExecution for more detailed documentation on Priority based throttling. + * @hidden + * Class which specifies the ChangeFeedIterator to start reading changes from this moment in time. */ -exports.PriorityLevel = void 0; -(function (PriorityLevel) { - /** - * High Priority requests are throttled after Low priority requests. - */ - PriorityLevel["High"] = "High"; - /** - * Low Priority requests are throttled before High priority requests. - */ - PriorityLevel["Low"] = "Low"; -})(exports.PriorityLevel || (exports.PriorityLevel = {})); +class ChangeFeedStartFromNow { + constructor(cfResource) { + this.cfResource = cfResource; + } + getCfResource() { + return this.cfResource; + } +} -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * Enum for trigger operation values. - * specifies the operations on which a trigger should be executed. + * @hidden + * Class which specifies the ChangeFeedIterator to start reading changes from a particular point of time. */ -exports.TriggerOperation = void 0; -(function (TriggerOperation) { - /** All operations. */ - TriggerOperation["All"] = "all"; - /** Create operations only. */ - TriggerOperation["Create"] = "create"; - /** Update operations only. */ - TriggerOperation["Update"] = "update"; - /** Delete operations only. */ - TriggerOperation["Delete"] = "delete"; - /** Replace operations only. */ - TriggerOperation["Replace"] = "replace"; -})(exports.TriggerOperation || (exports.TriggerOperation = {})); +class ChangeFeedStartFromTime { + constructor(startTime, cfResource) { + this.startTime = startTime; + this.cfResource = cfResource; + } + getCfResource() { + return this.cfResource; + } + getStartTime() { + return this.startTime; + } +} // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * Enum for trigger type values. - * Specifies the type of the trigger. + * Specifies a feed range for the changefeed. */ -exports.TriggerType = void 0; -(function (TriggerType) { - /** Trigger should be executed before the associated operation(s). */ - TriggerType["Pre"] = "pre"; - /** Trigger should be executed after the associated operation(s). */ - TriggerType["Post"] = "post"; -})(exports.TriggerType || (exports.TriggerType = {})); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +class FeedRange { + /** + * @internal + */ + constructor(minInclusive, maxExclusive) { + // only way to explictly block users from creating FeedRange directly in JS + if (new.target === FeedRange) { + throw new ErrorResponse("Cannot instantiate abstract class FeedRange"); + } + this.minInclusive = minInclusive; + this.maxExclusive = maxExclusive; + } +} /** - * Enum for udf type values. - * Specifies the types of user defined functions. + * @hidden + * Specifies a feed range for the changefeed. */ -exports.UserDefinedFunctionType = void 0; -(function (UserDefinedFunctionType) { - /** The User Defined Function is written in JavaScript. This is currently the only option. */ - UserDefinedFunctionType["Javascript"] = "Javascript"; -})(exports.UserDefinedFunctionType || (exports.UserDefinedFunctionType = {})); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-exports.GeospatialType = void 0; -(function (GeospatialType) { - /** Represents data in round-earth coordinate system. */ - GeospatialType["Geography"] = "Geography"; - /** Represents data in Eucledian(flat) coordinate system. */ - GeospatialType["Geometry"] = "Geometry"; -})(exports.GeospatialType || (exports.GeospatialType = {})); +class FeedRangeInternal extends FeedRange { + /* eslint-disable @typescript-eslint/no-useless-constructor */ + constructor(minInclusive, maxExclusive) { + super(minInclusive, maxExclusive); + } +} -// Copyright (c) Microsoft Corporation. -const logger$4 = logger$5.createClientLogger("extractPartitionKey"); /** - * Function to extract PartitionKey based on {@link PartitionKeyDefinition} - * from an object. - * Retuns - * 1. PartitionKeyInternal[] if extraction is successful. - * 2. undefined if either {@link partitionKeyDefinition} is not well formed - * or an unsupported partitionkey type is encountered. * @hidden + * Validates the change feed options passed by the user */ -function extractPartitionKeys(document, partitionKeyDefinition) { - if (partitionKeyDefinition && - partitionKeyDefinition.paths && - partitionKeyDefinition.paths.length > 0) { - if (partitionKeyDefinition.systemKey === true) { - return []; - } - if (partitionKeyDefinition.paths.length === 1 && - partitionKeyDefinition.paths[0] === DEFAULT_PARTITION_KEY_PATH) { - return [extractPartitionKey(DEFAULT_PARTITION_KEY_PATH, document)]; - } - const partitionKeys = []; - partitionKeyDefinition.paths.forEach((path) => { - const obj = extractPartitionKey(path, document); - if (obj === undefined) { - logger$4.warning("Unsupported PartitionKey found."); - return undefined; - } - partitionKeys.push(obj); - }); - return partitionKeys; +function validateChangeFeedIteratorOptions(options) { + if (!isChangeFeedIteratorOptions(options)) { + throw new ErrorResponse("Invalid Changefeed Iterator Options."); } - logger$4.error("Unexpected Partition Key Definition Found."); - return undefined; -} -function extractPartitionKey(path, obj) { - const pathParts = parsePath(path); - for (const part of pathParts) { - if (typeof obj === "object" && obj !== null && part in obj) { - obj = obj[part]; - } - else { - obj = undefined; - break; - } + if ((options === null || options === void 0 ? void 0 : options.maxItemCount) && typeof (options === null || options === void 0 ? void 0 : options.maxItemCount) !== "number") { + throw new ErrorResponse("maxItemCount must be number"); } - if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") { - return obj; + if ((options === null || options === void 0 ? void 0 : options.maxItemCount) !== undefined && (options === null || options === void 0 ? void 0 : options.maxItemCount) < 1) { + throw new ErrorResponse("maxItemCount must be a positive number"); } - else if (obj === NullPartitionKeyLiteral) { - return NullPartitionKeyLiteral; +} +function isChangeFeedIteratorOptions(options) { + if (typeof options !== "object") { + return false; } - else if (obj === undefined || JSON.stringify(obj) === JSON.stringify(NonePartitionKeyLiteral)) { - return NonePartitionKeyLiteral; + if (Object.keys(options).length === 0 && JSON.stringify(options) === "{}") { + return true; } - return undefined; + return options && !(isPrimitivePartitionKeyValue(options) || Array.isArray(options)); } /** * @hidden + * Checks if pkRange entirely covers the given overLapping range or there is only partial overlap. 
+ * + * If no complete overlap, exact range which overlaps is retured which is used to set minEpk and maxEpk headers while quering change feed. */ -function undefinedPartitionKey(partitionKeyDefinition) { - if (partitionKeyDefinition.systemKey === true) { - return []; +async function extractOverlappingRanges(epkRange, overLappingRange) { + if (overLappingRange.minInclusive >= epkRange.min && + overLappingRange.maxExclusive <= epkRange.max) { + return [undefined, undefined]; } - else { - return partitionKeyDefinition.paths.map(() => NonePartitionKeyLiteral); + else if (overLappingRange.minInclusive <= epkRange.min && + overLappingRange.maxExclusive >= epkRange.max) { + return [epkRange.min, epkRange.max]; } -} - -// Copyright (c) Microsoft Corporation. -/** - * Utility function to avoid writing boilder plate code while checking for - * undefined values. It throws Error if the input value is undefined. - * @param value - Value which is potentially undefined. - * @param msg - Error Message to throw if value is undefined. - * @returns - */ -function assertNotUndefined(value, msg) { - if (value !== undefined) { - return value; + // Right Side of overlapping range is covered + else if (overLappingRange.minInclusive <= epkRange.min && + overLappingRange.maxExclusive <= epkRange.max && + overLappingRange.maxExclusive >= epkRange.min) { + return [epkRange.min, overLappingRange.maxExclusive]; + } + // Left Side of overlapping range is covered + else { + return [overLappingRange.minInclusive, epkRange.max]; } - throw new Error(msg || "Unexpected 'undefined' value encountered"); -} -/** - * Check for value being PrimitivePartitionKeyValue. - * @internal - */ -function isPrimitivePartitionKeyValue(value) { - return (isWellDefinedPartitionKeyValue(value) || - isNonePartitionKeyValue(value) || - isNullPartitionKeyValue(value)); } /** - * Check for value being string, number or boolean. - * @internal + * @hidden + * Checks if the object is a valid EpkRange */ -function isWellDefinedPartitionKeyValue(value) { - return typeof value === "string" || typeof value === "boolean" || typeof value === "number"; +function isEpkRange(obj) { + return (obj instanceof FeedRangeInternal && + typeof obj.minInclusive === "string" && + typeof obj.maxExclusive === "string" && + obj.minInclusive >= + Constants$1.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey && + obj.maxExclusive <= + Constants$1.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey && + obj.maxExclusive > obj.minInclusive); } /** - * Check for value being NonePartitionKeyType. - * @internal + * @hidden */ -function isNonePartitionKeyValue(value) { - return value !== undefined && JSON.stringify(value) === JSON.stringify(NonePartitionKeyLiteral); +function buildInternalChangeFeedOptions(options, continuationToken, startTime) { + const internalCfOptions = {}; + internalCfOptions.maxItemCount = options === null || options === void 0 ? void 0 : options.maxItemCount; + internalCfOptions.sessionToken = options === null || options === void 0 ? void 0 : options.sessionToken; + internalCfOptions.continuationToken = continuationToken; + // Default option of changefeed is to start from now. + internalCfOptions.startTime = startTime; + return internalCfOptions; } /** - * Check for value being NullPartitionKeyType. 
- * @internal + * @hidden */ -function isNullPartitionKeyValue(value) { - return value === NullPartitionKeyLiteral; +function fetchStartTime(changeFeedStartFrom) { + if (changeFeedStartFrom instanceof ChangeFeedStartFromBeginning) { + return undefined; + } + else if (changeFeedStartFrom instanceof ChangeFeedStartFromNow) { + return new Date(); + } + else if (changeFeedStartFrom instanceof ChangeFeedStartFromTime) { + return changeFeedStartFrom.getStartTime(); + } } /** - * Verify validity of partition key. - * @internal + * @hidden */ -function isPartitionKey(partitionKey) { - return isPrimitivePartitionKeyValue(partitionKey) || Array.isArray(partitionKey); +function isNullOrEmpty(text) { + return text === null || text === undefined || text.trim() === ""; } -// Copyright (c) Microsoft Corporation. -/** - * The \@azure/logger configuration for this package. - */ -const defaultLogger = logger$5.createClientLogger("cosmosdb"); - -// Copyright (c) Microsoft Corporation. -// ---------------------------------------------------------------------------- -// Utility methods -// -/** @hidden */ -function javaScriptFriendlyJSONStringify(s) { - // two line terminators (Line separator and Paragraph separator) are not needed to be escaped in JSON - // but are needed to be escaped in JavaScript. - return JSON.stringify(s) - .replace(/\u2028/g, "\\u2028") - .replace(/\u2029/g, "\\u2029"); -} -/** @hidden */ -function bodyFromData(data) { - if (typeof data === "object") { - return javaScriptFriendlyJSONStringify(data); - } - return data; -} -const JsonContentType = "application/json"; /** * @hidden + * Provides iterator for change feed for entire container or an epk range. + * + * Use `Items.getChangeFeedIterator()` to get an instance of the iterator. */ -async function getHeaders({ clientOptions, defaultHeaders, verb, path, resourceId, resourceType, options = {}, partitionKeyRangeId, useMultipleWriteLocations, partitionKey, }) { - const headers = Object.assign({ [Constants$1.HttpHeaders.ResponseContinuationTokenLimitInKB]: 1, [Constants$1.HttpHeaders.EnableCrossPartitionQuery]: true }, defaultHeaders); - if (useMultipleWriteLocations) { - headers[Constants$1.HttpHeaders.ALLOW_MULTIPLE_WRITES] = true; - } - if (options.continuationTokenLimitInKB) { - headers[Constants$1.HttpHeaders.ResponseContinuationTokenLimitInKB] = - options.continuationTokenLimitInKB; - } - if (options.continuationToken) { - headers[Constants$1.HttpHeaders.Continuation] = options.continuationToken; - } - else if (options.continuation) { - headers[Constants$1.HttpHeaders.Continuation] = options.continuation; - } - if (options.preTriggerInclude) { - headers[Constants$1.HttpHeaders.PreTriggerInclude] = - options.preTriggerInclude.constructor === Array - ? options.preTriggerInclude.join(",") - : options.preTriggerInclude; - } - if (options.postTriggerInclude) { - headers[Constants$1.HttpHeaders.PostTriggerInclude] = - options.postTriggerInclude.constructor === Array - ? 
options.postTriggerInclude.join(",") - : options.postTriggerInclude; - } - if (options.offerType) { - headers[Constants$1.HttpHeaders.OfferType] = options.offerType; +class ChangeFeedForEpkRange { + /** + * @internal + */ + constructor(clientContext, container, partitionKeyRangeCache, resourceId, resourceLink, url, changeFeedOptions, epkRange) { + this.clientContext = clientContext; + this.container = container; + this.partitionKeyRangeCache = partitionKeyRangeCache; + this.resourceId = resourceId; + this.resourceLink = resourceLink; + this.url = url; + this.changeFeedOptions = changeFeedOptions; + this.epkRange = epkRange; + this.generateContinuationToken = () => { + return JSON.stringify(new CompositeContinuationToken(this.rId, this.queue.returnSnapshot())); + }; + this.queue = new FeedRangeQueue(); + this.continuationToken = changeFeedOptions.continuationToken + ? JSON.parse(changeFeedOptions.continuationToken) + : undefined; + this.startTime = changeFeedOptions.startTime + ? changeFeedOptions.startTime.toUTCString() + : undefined; + this.isInstantiated = false; } - if (options.offerThroughput) { - headers[Constants$1.HttpHeaders.OfferThroughput] = options.offerThroughput; + async setIteratorRid(diagnosticNode) { + const { resource } = await this.container.readInternal(diagnosticNode); + this.rId = resource._rid; } - if (options.maxItemCount) { - headers[Constants$1.HttpHeaders.PageSize] = options.maxItemCount; + continuationTokenRidMatchContainerRid() { + if (this.continuationToken.rid !== this.rId) { + return false; + } + return true; } - if (options.accessCondition) { - if (options.accessCondition.type === "IfMatch") { - headers[Constants$1.HttpHeaders.IfMatch] = options.accessCondition.condition; + async fillChangeFeedQueue(diagnosticNode) { + if (this.continuationToken) { + // fill the queue with feed ranges in continuation token. + await this.fetchContinuationTokenFeedRanges(diagnosticNode); } else { - headers[Constants$1.HttpHeaders.IfNoneMatch] = options.accessCondition.condition; + // fill the queue with feed ranges overlapping the given epk range. + await this.fetchOverLappingFeedRanges(diagnosticNode); } + this.isInstantiated = true; } - if (options.useIncrementalFeed) { - headers[Constants$1.HttpHeaders.A_IM] = "Incremental Feed"; - } - if (options.indexingDirective) { - headers[Constants$1.HttpHeaders.IndexingDirective] = options.indexingDirective; - } - if (options.consistencyLevel) { - headers[Constants$1.HttpHeaders.ConsistencyLevel] = options.consistencyLevel; - } - if (options.priorityLevel) { - headers[Constants$1.HttpHeaders.PriorityLevel] = options.priorityLevel; + /** + * Fill the queue with the feed ranges overlapping with the given epk range. 
+ */ + async fetchOverLappingFeedRanges(diagnosticNode) { + try { + const overLappingRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, this.epkRange, diagnosticNode); + for (const overLappingRange of overLappingRanges) { + const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(this.epkRange, overLappingRange); + const feedRange = new ChangeFeedRange(overLappingRange.minInclusive, overLappingRange.maxExclusive, "", epkMinHeader, epkMaxHeader); + this.queue.enqueue(feedRange); + } + } + catch (err) { + throw new ErrorResponse(err.message); + } } - if (options.maxIntegratedCacheStalenessInMs && resourceType === exports.ResourceType.item) { - if (typeof options.maxIntegratedCacheStalenessInMs === "number") { - headers[Constants$1.HttpHeaders.DedicatedGatewayPerRequestCacheStaleness] = - options.maxIntegratedCacheStalenessInMs.toString(); + /** + * Fill the queue with feed ranges from continuation token + */ + async fetchContinuationTokenFeedRanges(diagnosticNode) { + const contToken = this.continuationToken; + if (!this.continuationTokenRidMatchContainerRid()) { + throw new ErrorResponse("The continuation token is not for the current container definition"); } else { - defaultLogger.error(`RangeError: maxIntegratedCacheStalenessInMs "${options.maxIntegratedCacheStalenessInMs}" is not a valid parameter.`); - headers[Constants$1.HttpHeaders.DedicatedGatewayPerRequestCacheStaleness] = "null"; + for (const cToken of contToken.Continuation) { + const queryRange = new QueryRange(cToken.minInclusive, cToken.maxExclusive, true, false); + try { + const overLappingRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, queryRange, diagnosticNode); + for (const overLappingRange of overLappingRanges) { + // check if the epk range present in continuation token entirely covers the overlapping range. + // If yes, minInclusive and maxExclusive of the overlapping range will be set. + // If no, i.e. there is only partial overlap, epkMinHeader and epkMaxHeader are set as min and max of overlap. + // This will be used when we make a call to fetch change feed. + const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(queryRange, overLappingRange); + const feedRange = new ChangeFeedRange(overLappingRange.minInclusive, overLappingRange.maxExclusive, cToken.continuationToken, epkMinHeader, epkMaxHeader); + this.queue.enqueue(feedRange); + } + } + catch (err) { + throw new ErrorResponse(err.message); + } + } } } - if (options.resourceTokenExpirySeconds) { - headers[Constants$1.HttpHeaders.ResourceTokenExpiry] = options.resourceTokenExpirySeconds; - } - if (options.sessionToken) { - headers[Constants$1.HttpHeaders.SessionToken] = options.sessionToken; - } - if (options.enableScanInQuery) { - headers[Constants$1.HttpHeaders.EnableScanInQuery] = options.enableScanInQuery; - } - if (options.populateQuotaInfo) { - headers[Constants$1.HttpHeaders.PopulateQuotaInfo] = options.populateQuotaInfo; - } - if (options.populateQueryMetrics) { - headers[Constants$1.HttpHeaders.PopulateQueryMetrics] = options.populateQueryMetrics; - } - if (options.maxDegreeOfParallelism !== undefined) { - headers[Constants$1.HttpHeaders.ParallelizeCrossPartitionQuery] = true; - } - if (options.populateQuotaInfo) { - headers[Constants$1.HttpHeaders.PopulateQuotaInfo] = true; + /** + * Change feed is an infinite feed. hasMoreResults is always true. 
+ */ + get hasMoreResults() { + return true; } - if (partitionKey !== undefined && !headers[Constants$1.HttpHeaders.PartitionKey]) { - headers[Constants$1.HttpHeaders.PartitionKey] = jsonStringifyAndEscapeNonASCII(partitionKey); + /** + * Gets an async iterator which will yield change feed results. + */ + getAsyncIterator() { + return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() { + do { + const result = yield tslib.__await(this.readNext()); + yield yield tslib.__await(result); + } while (this.hasMoreResults); + }); } - if (clientOptions.key || clientOptions.tokenProvider) { - headers[Constants$1.HttpHeaders.XDate] = new Date().toUTCString(); + /** + * Gets an async iterator which will yield pages of results from Azure Cosmos DB. + * + * Keeps iterating over the feedranges and checks if any feed range has new result. Keeps note of the last feed range which returned non 304 result. + * + * When same feed range is reached and no new changes are found, a 304 (not Modified) is returned to the end user. Then starts process all over again. + */ + async readNext() { + return withDiagnostics(async (diagnosticNode) => { + // validate if the internal queue is filled up with feed ranges. + if (!this.isInstantiated) { + await this.setIteratorRid(diagnosticNode); + await this.fillChangeFeedQueue(diagnosticNode); + } + // stores the last feedRange for which statusCode is not 304 i.e. there were new changes in that feed range. + let firstNotModifiedFeedRange = undefined; + let result; + do { + const [processedFeedRange, response] = await this.fetchNext(diagnosticNode); + result = response; + if (result !== undefined) { + { + if (firstNotModifiedFeedRange === undefined) { + firstNotModifiedFeedRange = processedFeedRange; + } + // move current feed range to end of queue to fetch result of next feed range. + // This is done to fetch changes in breadth first manner and avoid starvation. + this.queue.moveFirstElementToTheEnd(); + // check if there are new results for the given feed range. + if (result.statusCode === StatusCodes.Ok) { + result.headers[Constants$1.HttpHeaders.ContinuationToken] = + this.generateContinuationToken(); + return result; + } + } + } + } while (!this.checkedAllFeedRanges(firstNotModifiedFeedRange)); + // set the continuation token after processing. + result.headers[Constants$1.HttpHeaders.ContinuationToken] = this.generateContinuationToken(); + return result; + }, this.clientContext); } - if (verb === exports.HTTPMethod.post || verb === exports.HTTPMethod.put) { - if (!headers[Constants$1.HttpHeaders.ContentType]) { - headers[Constants$1.HttpHeaders.ContentType] = JsonContentType; + /** + * Read feed and retrieves the next page of results in Azure Cosmos DB. + */ + async fetchNext(diagnosticNode) { + const feedRange = this.queue.peek(); + if (feedRange) { + // fetch results for feed range at the beginning of the queue. + const result = await this.getFeedResponse(feedRange, diagnosticNode); + // check if results need to be fetched again depending on status code returned. + // Eg. in case of paritionSplit, results need to be fetched for the child partitions. + const shouldRetry = await this.shouldRetryOnFailure(feedRange, result, diagnosticNode); + if (shouldRetry) { + this.queue.dequeue(); + return this.fetchNext(diagnosticNode); + } + else { + // update the continuation value for the current feed range. 
+ const continuationValueForFeedRange = result.headers[Constants$1.HttpHeaders.ETag]; + const newFeedRange = this.queue.peek(); + newFeedRange.continuationToken = continuationValueForFeedRange; + return [[newFeedRange.minInclusive, newFeedRange.maxExclusive], result]; + } + } + else { + return [[undefined, undefined], undefined]; } } - if (!headers[Constants$1.HttpHeaders.Accept]) { - headers[Constants$1.HttpHeaders.Accept] = JsonContentType; - } - if (partitionKeyRangeId !== undefined) { - headers[Constants$1.HttpHeaders.PartitionKeyRangeID] = partitionKeyRangeId; - } - if (options.enableScriptLogging) { - headers[Constants$1.HttpHeaders.EnableScriptLogging] = options.enableScriptLogging; - } - if (options.disableRUPerMinuteUsage) { - headers[Constants$1.HttpHeaders.DisableRUPerMinuteUsage] = true; - } - if (options.populateIndexMetrics) { - headers[Constants$1.HttpHeaders.PopulateIndexMetrics] = options.populateIndexMetrics; + checkedAllFeedRanges(firstNotModifiedFeedRange) { + if (firstNotModifiedFeedRange === undefined) { + return false; + } + const feedRangeQueueFirstElement = this.queue.peek(); + return (firstNotModifiedFeedRange[0] === (feedRangeQueueFirstElement === null || feedRangeQueueFirstElement === void 0 ? void 0 : feedRangeQueueFirstElement.minInclusive) && + firstNotModifiedFeedRange[1] === (feedRangeQueueFirstElement === null || feedRangeQueueFirstElement === void 0 ? void 0 : feedRangeQueueFirstElement.maxExclusive)); } - if (clientOptions.key || - clientOptions.resourceTokens || - clientOptions.tokenProvider || - clientOptions.permissionFeed) { - await setAuthorizationHeader(clientOptions, verb, path, resourceId, resourceType, headers); + /** + * Checks whether the current EpkRange is split into multiple ranges or not. + * + * If yes, it force refreshes the partitionKeyRange cache and enqueue children epk ranges. + */ + async shouldRetryOnFailure(feedRange, response, diagnosticNode) { + if (response.statusCode === StatusCodes.Ok || response.statusCode === StatusCodes.NotModified) { + return false; + } + const partitionSplit = response.statusCode === StatusCodes.Gone && + (response.subStatusCode === SubStatusCodes.PartitionKeyRangeGone || + response.subStatusCode === SubStatusCodes.CompletingSplit); + if (partitionSplit) { + const queryRange = new QueryRange(feedRange.minInclusive, feedRange.maxExclusive, true, false); + const resolvedRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, queryRange, diagnosticNode, true); + if (resolvedRanges.length < 1) { + throw new ErrorResponse("Partition split/merge detected but no overlapping ranges found."); + } + // This covers both cases of merge and split. + // resolvedRanges.length > 1 in case of split. + // resolvedRanges.length === 1 in case of merge. EpkRange headers will be added in this case. + if (resolvedRanges.length >= 1) { + await this.handleSplit(false, resolvedRanges, queryRange, feedRange.continuationToken); + } + return true; + } + return false; } - return headers; -} - -// Copyright (c) Microsoft Corporation. -const uuid$2 = uuid$3.v4; -function isKeyInRange(min, max, key) { - const isAfterMinInclusive = key.localeCompare(min) >= 0; - const isBeforeMax = key.localeCompare(max) < 0; - return isAfterMinInclusive && isBeforeMax; -} -const BulkOperationType = { - Create: "Create", - Upsert: "Upsert", - Read: "Read", - Delete: "Delete", - Replace: "Replace", - Patch: "Patch", -}; -/** - * Maps OperationInput to Operation by - * - generating Ids if needed. 
- * - choosing partitionKey which can be used to choose which batch this - * operation should be part of. The order is - - * 1. If the operationInput itself has partitionKey field set it is used. - * 2. Other wise for create/replace/upsert it is extracted from resource body. - * 3. For read/delete/patch type operations undefined partitionKey is used. - * - Here one nuance is that, the partitionKey field inside Operation needs to - * be serialized as a JSON string. - * @param operationInput - OperationInput - * @param definition - PartitionKeyDefinition - * @param options - RequestOptions - * @returns - */ -function prepareOperations(operationInput, definition, options = {}) { - populateIdsIfNeeded(operationInput, options); - let partitionKey; - if (Object.prototype.hasOwnProperty.call(operationInput, "partitionKey")) { - if (operationInput.partitionKey === undefined) { - partitionKey = definition.paths.map(() => NonePartitionKeyLiteral); + /* + * Enqueues all the children feed ranges for the given feed range. + */ + async handleSplit(shiftLeft, resolvedRanges, oldFeedRange, continuationToken) { + let flag = 0; + if (shiftLeft) { + // This section is only applicable when handleSplit is called by getPartitionRangeId(). + // used only when existing partition key range cache is used to check for any overlapping ranges. + // Modifies the first element with the first overlapping range. + const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(oldFeedRange, resolvedRanges[0]); + const newFeedRange = new ChangeFeedRange(resolvedRanges[0].minInclusive, resolvedRanges[0].maxExclusive, continuationToken, epkMinHeader, epkMaxHeader); + this.queue.modifyFirstElement(newFeedRange); + flag = 1; } - else { - partitionKey = convertToInternalPartitionKey(operationInput.partitionKey); + // Enqueue the overlapping ranges. + for (let i = flag; i < resolvedRanges.length; i++) { + const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(oldFeedRange, resolvedRanges[i]); + const newFeedRange = new ChangeFeedRange(resolvedRanges[i].minInclusive, resolvedRanges[i].maxExclusive, continuationToken, epkMinHeader, epkMaxHeader); + this.queue.enqueue(newFeedRange); } } - else { - switch (operationInput.operationType) { - case BulkOperationType.Create: - case BulkOperationType.Replace: - case BulkOperationType.Upsert: - partitionKey = assertNotUndefined(extractPartitionKeys(operationInput.resourceBody, definition), "Unexpected undefined Partition Key Found."); - break; - case BulkOperationType.Read: - case BulkOperationType.Delete: - case BulkOperationType.Patch: - partitionKey = definition.paths.map(() => NonePartitionKeyLiteral); + /** + * Fetch the partitionKeyRangeId for the given feed range. + * + * This partitionKeyRangeId is passed to queryFeed to fetch the results. + */ + async getPartitionRangeId(feedRange, diagnosticNode) { + const min = feedRange.epkMinHeader ? feedRange.epkMinHeader : feedRange.minInclusive; + const max = feedRange.epkMaxHeader ? 
feedRange.epkMaxHeader : feedRange.maxExclusive; + const queryRange = new QueryRange(min, max, true, false); + const resolvedRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, queryRange, diagnosticNode, false); + if (resolvedRanges.length < 1) { + throw new ErrorResponse("No overlapping ranges found."); + } + const firstResolvedRange = resolvedRanges[0]; + if (resolvedRanges.length > 1) { + await this.handleSplit(true, resolvedRanges, queryRange, feedRange.continuationToken); } + return firstResolvedRange.id; } - return { - operation: Object.assign(Object.assign({}, operationInput), { partitionKey: JSON.stringify(partitionKey) }), - partitionKey, - }; -} -/** - * For operations requiring Id genrate random uuids. - * @param operationInput - OperationInput to be checked. - * @param options - RequestOptions - */ -function populateIdsIfNeeded(operationInput, options) { - if (operationInput.operationType === BulkOperationType.Create || - operationInput.operationType === BulkOperationType.Upsert) { - if ((operationInput.resourceBody.id === undefined || operationInput.resourceBody.id === "") && - !options.disableAutomaticIdGeneration) { - operationInput.resourceBody.id = uuid$2(); + async getFeedResponse(feedRange, diagnosticNode) { + const feedOptions = { initialHeaders: {}, useIncrementalFeed: true }; + if (typeof this.changeFeedOptions.maxItemCount === "number") { + feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount; + } + if (this.changeFeedOptions.sessionToken) { + feedOptions.sessionToken = this.changeFeedOptions.sessionToken; + } + if (feedRange.continuationToken) { + feedOptions.accessCondition = { + type: Constants$1.HttpHeaders.IfNoneMatch, + condition: feedRange.continuationToken, + }; + } + if (this.startTime) { + feedOptions.initialHeaders[Constants$1.HttpHeaders.IfModifiedSince] = this.startTime; + } + const rangeId = await this.getPartitionRangeId(feedRange, diagnosticNode); + try { + // startEpk and endEpk are only valid in case we want to fetch result for a part of partition and not the entire partition. + const response = await this.clientContext.queryFeed({ + path: this.resourceLink, + resourceType: exports.ResourceType.item, + resourceId: this.resourceId, + resultFn: (result) => (result ? result.Documents : []), + query: undefined, + options: feedOptions, + diagnosticNode, + partitionKey: undefined, + partitionKeyRangeId: rangeId, + startEpk: feedRange.epkMinHeader, + endEpk: feedRange.epkMaxHeader, + }); + return new ChangeFeedIteratorResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers, getEmptyCosmosDiagnostics()); + } + catch (err) { + // If any errors are encountered, eg. partition split or gone, handle it based on error code and not break the flow. + return new ChangeFeedIteratorResponse([], 0, err.code, err.headers, getEmptyCosmosDiagnostics(), err.substatus); } } } + /** - * Splits a batch into array of batches based on cumulative size of its operations by making sure - * cumulative size of an individual batch is not larger than {@link Constants.DefaultMaxBulkRequestBodySizeInBytes}. - * If a single operation itself is larger than {@link Constants.DefaultMaxBulkRequestBodySizeInBytes}, that - * operation would be moved into a batch containing only that operation. - * @param originalBatch - A batch of operations needed to be checked. - * @returns - * @hidden + * Continuation token for change feed of entire container, or a specific Epk Range. 
+ * @internal */ -function splitBatchBasedOnBodySize(originalBatch) { - if ((originalBatch === null || originalBatch === void 0 ? void 0 : originalBatch.operations) === undefined || originalBatch.operations.length < 1) - return []; - let currentBatchSize = calculateObjectSizeInBytes(originalBatch.operations[0]); - let currentBatch = Object.assign(Object.assign({}, originalBatch), { operations: [originalBatch.operations[0]], indexes: [originalBatch.indexes[0]] }); - const processedBatches = []; - processedBatches.push(currentBatch); - for (let index = 1; index < originalBatch.operations.length; index++) { - const operation = originalBatch.operations[index]; - const currentOpSize = calculateObjectSizeInBytes(operation); - if (currentBatchSize + currentOpSize > Constants$1.DefaultMaxBulkRequestBodySizeInBytes) { - currentBatch = Object.assign(Object.assign({}, originalBatch), { operations: [], indexes: [] }); - processedBatches.push(currentBatch); - currentBatchSize = 0; - } - currentBatch.operations.push(operation); - currentBatch.indexes.push(originalBatch.indexes[index]); - currentBatchSize += currentOpSize; +class ContinuationTokenForPartitionKey { + constructor(rid, partitionKey, continuation) { + this.rid = rid; + this.partitionKey = partitionKey; + this.Continuation = continuation; } - return processedBatches; } + /** - * Calculates size of an JSON object in bytes with utf-8 encoding. * @hidden + * Provides iterator for change feed for one partition key. + * + * Use `Items.getChangeFeedIterator()` to get an instance of the iterator. */ -function calculateObjectSizeInBytes(obj) { - return new TextEncoder().encode(bodyFromData(obj)).length; -} -function decorateBatchOperation(operation, options = {}) { - if (operation.operationType === BulkOperationType.Create || - operation.operationType === BulkOperationType.Upsert) { - if ((operation.resourceBody.id === undefined || operation.resourceBody.id === "") && - !options.disableAutomaticIdGeneration) { - operation.resourceBody.id = uuid$2(); +class ChangeFeedForPartitionKey { + /** + * @internal + */ + constructor(clientContext, container, resourceId, resourceLink, partitionKey, changeFeedOptions) { + this.clientContext = clientContext; + this.container = container; + this.resourceId = resourceId; + this.resourceLink = resourceLink; + this.partitionKey = partitionKey; + this.changeFeedOptions = changeFeedOptions; + this.continuationToken = changeFeedOptions.continuationToken + ? JSON.parse(changeFeedOptions.continuationToken) + : undefined; + this.isInstantiated = false; + if (changeFeedOptions.startTime) { + this.startTime = changeFeedOptions.startTime.toUTCString(); } } - return operation; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-const PatchOperationType = { - add: "add", - replace: "replace", - remove: "remove", - set: "set", - incr: "incr", -}; - -class ErrorResponse extends Error { -} - -class ResourceResponse { - constructor(resource, headers, statusCode, diagnostics, substatus) { - this.resource = resource; - this.headers = headers; - this.statusCode = statusCode; - this.diagnostics = diagnostics; - this.substatus = substatus; + async instantiateIterator(diagnosticNode) { + await this.setIteratorRid(diagnosticNode); + if (this.continuationToken) { + if (!this.continuationTokenRidMatchContainerRid()) { + throw new ErrorResponse("The continuation is not for the current container definition."); + } + } + else { + this.continuationToken = new ContinuationTokenForPartitionKey(this.rId, this.partitionKey, ""); + } + this.isInstantiated = true; } - get requestCharge() { - return Number(this.headers[Constants$1.HttpHeaders.RequestCharge]) || 0; + continuationTokenRidMatchContainerRid() { + if (this.continuationToken.rid !== this.rId) { + return false; + } + return true; } - get activityId() { - return this.headers[Constants$1.HttpHeaders.ActivityId]; + async setIteratorRid(diagnosticNode) { + const { resource } = await this.container.readInternal(diagnosticNode); + this.rId = resource._rid; } - get etag() { - return this.headers[Constants$1.HttpHeaders.ETag]; + /** + * Change feed is an infinite feed. hasMoreResults is always true. + */ + get hasMoreResults() { + return true; } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -class ClientSideMetrics { - constructor(requestCharge) { - this.requestCharge = requestCharge; + /** + * Gets an async iterator which will yield change feed results. + */ + getAsyncIterator() { + return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() { + do { + const result = yield tslib.__await(this.readNext()); + yield yield tslib.__await(result); + } while (this.hasMoreResults); + }); } /** - * Adds one or more ClientSideMetrics to a copy of this instance and returns the result. + * Returns the result of change feed from Azure Cosmos DB. */ - add(...clientSideMetricsArray) { - let requestCharge = this.requestCharge; - for (const clientSideMetrics of clientSideMetricsArray) { - if (clientSideMetrics == null) { - throw new Error("clientSideMetrics has null or undefined item(s)"); + async readNext() { + return withDiagnostics(async (diagnosticNode) => { + if (!this.isInstantiated) { + await this.instantiateIterator(diagnosticNode); } - requestCharge += clientSideMetrics.requestCharge; - } - return new ClientSideMetrics(requestCharge); + const result = await this.fetchNext(diagnosticNode); + return result; + }, this.clientContext); } - static createFromArray(...clientSideMetricsArray) { - if (clientSideMetricsArray == null) { - throw new Error("clientSideMetricsArray is null or undefined item(s)"); + /** + * Read feed and retrieves the next set of results in Azure Cosmos DB. 
+ */ + async fetchNext(diagnosticNode) { + const response = await this.getFeedResponse(diagnosticNode); + this.continuationToken.Continuation = response.headers[Constants$1.HttpHeaders.ETag]; + response.headers[Constants$1.HttpHeaders.ContinuationToken] = JSON.stringify(this.continuationToken); + return response; + } + async getFeedResponse(diagnosticNode) { + const feedOptions = { initialHeaders: {}, useIncrementalFeed: true }; + if (typeof this.changeFeedOptions.maxItemCount === "number") { + feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount; } - return this.zero.add(...clientSideMetricsArray); + if (this.changeFeedOptions.sessionToken) { + feedOptions.sessionToken = this.changeFeedOptions.sessionToken; + } + const continuation = this.continuationToken.Continuation; + if (continuation) { + feedOptions.accessCondition = { + type: Constants$1.HttpHeaders.IfNoneMatch, + condition: continuation, + }; + } + if (this.startTime) { + feedOptions.initialHeaders[Constants$1.HttpHeaders.IfModifiedSince] = this.startTime; + } + const response = await this.clientContext.queryFeed({ + path: this.resourceLink, + resourceType: exports.ResourceType.item, + resourceId: this.resourceId, + resultFn: (result) => (result ? result.Documents : []), + diagnosticNode, + query: undefined, + options: feedOptions, + partitionKey: this.partitionKey, + }); + return new ChangeFeedIteratorResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers, getEmptyCosmosDiagnostics()); } } -ClientSideMetrics.zero = new ClientSideMetrics(0); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -var QueryMetricsConstants = { - // QueryMetrics - RetrievedDocumentCount: "retrievedDocumentCount", - RetrievedDocumentSize: "retrievedDocumentSize", - OutputDocumentCount: "outputDocumentCount", - OutputDocumentSize: "outputDocumentSize", - IndexHitRatio: "indexUtilizationRatio", - IndexHitDocumentCount: "indexHitDocumentCount", - TotalQueryExecutionTimeInMs: "totalExecutionTimeInMs", - // QueryPreparationTimes - QueryCompileTimeInMs: "queryCompileTimeInMs", - LogicalPlanBuildTimeInMs: "queryLogicalPlanBuildTimeInMs", - PhysicalPlanBuildTimeInMs: "queryPhysicalPlanBuildTimeInMs", - QueryOptimizationTimeInMs: "queryOptimizationTimeInMs", - // QueryTimes - IndexLookupTimeInMs: "indexLookupTimeInMs", - DocumentLoadTimeInMs: "documentLoadTimeInMs", - VMExecutionTimeInMs: "VMExecutionTimeInMs", - DocumentWriteTimeInMs: "writeOutputTimeInMs", - // RuntimeExecutionTimes - QueryEngineTimes: "queryEngineTimes", - SystemFunctionExecuteTimeInMs: "systemFunctionExecuteTimeInMs", - UserDefinedFunctionExecutionTimeInMs: "userFunctionExecuteTimeInMs", - // QueryMetrics Text - RetrievedDocumentCountText: "Retrieved Document Count", - RetrievedDocumentSizeText: "Retrieved Document Size", - OutputDocumentCountText: "Output Document Count", - OutputDocumentSizeText: "Output Document Size", - IndexUtilizationText: "Index Utilization", - TotalQueryExecutionTimeText: "Total Query Execution Time", - // QueryPreparationTimes Text - QueryPreparationTimesText: "Query Preparation Times", - QueryCompileTimeText: "Query Compilation Time", - LogicalPlanBuildTimeText: "Logical Plan Build Time", - PhysicalPlanBuildTimeText: "Physical Plan Build Time", - QueryOptimizationTimeText: "Query Optimization Time", - // QueryTimes Text - QueryEngineTimesText: "Query Engine Times", - IndexLookupTimeText: "Index Lookup Time", - DocumentLoadTimeText: "Document Load Time", - WriteOutputTimeText: 
"Document Write Time", - // RuntimeExecutionTimes Text - RuntimeExecutionTimesText: "Runtime Execution Times", - TotalExecutionTimeText: "Query Engine Execution Time", - SystemFunctionExecuteTimeText: "System Function Execution Time", - UserDefinedFunctionExecutionTimeText: "User-defined Function Execution Time", - // ClientSideQueryMetrics Text - ClientSideQueryMetricsText: "Client Side Metrics", - RetriesText: "Retry Count", - RequestChargeText: "Request Charge", - FetchExecutionRangesText: "Partition Execution Timeline", - SchedulingMetricsText: "Scheduling Metrics", -}; +/** + * Enum to specify the resource for which change feed is being fetched. + */ +var ChangeFeedResourceType; +(function (ChangeFeedResourceType) { + ChangeFeedResourceType[ChangeFeedResourceType["FeedRange"] = 0] = "FeedRange"; + ChangeFeedResourceType[ChangeFeedResourceType["PartitionKey"] = 1] = "PartitionKey"; +})(ChangeFeedResourceType || (ChangeFeedResourceType = {})); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// Ported this implementation to javascript: -// https://referencesource.microsoft.com/#mscorlib/system/timespan.cs,83e476c1ae112117 -/** @hidden */ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const ticksPerMillisecond = 10000; -/** @hidden */ -const millisecondsPerTick = 1.0 / ticksPerMillisecond; -/** @hidden */ -const ticksPerSecond = ticksPerMillisecond * 1000; // 10,000,000 -/** @hidden */ -const secondsPerTick = 1.0 / ticksPerSecond; // 0.0001 -/** @hidden */ -const ticksPerMinute = ticksPerSecond * 60; // 600,000,000 -/** @hidden */ -const minutesPerTick = 1.0 / ticksPerMinute; // 1.6666666666667e-9 -/** @hidden */ -const ticksPerHour = ticksPerMinute * 60; // 36,000,000,000 -/** @hidden */ -const hoursPerTick = 1.0 / ticksPerHour; // 2.77777777777777778e-11 -/** @hidden */ -const ticksPerDay = ticksPerHour * 24; // 864,000,000,000 -/** @hidden */ -const daysPerTick = 1.0 / ticksPerDay; // 1.1574074074074074074e-12 -/** @hidden */ -const millisPerSecond = 1000; -/** @hidden */ -const millisPerMinute = millisPerSecond * 60; // 60,000 -/** @hidden */ -const millisPerHour = millisPerMinute * 60; // 3,600,000 -/** @hidden */ -const millisPerDay = millisPerHour * 24; // 86,400,000 -/** @hidden */ -const maxMilliSeconds = Number.MAX_SAFE_INTEGER / ticksPerMillisecond; -/** @hidden */ -const minMilliSeconds = Number.MIN_SAFE_INTEGER / ticksPerMillisecond; /** - * Represents a time interval. - * - * @param days - Number of days. - * @param hours - Number of hours. - * @param minutes - Number of minutes. - * @param seconds - Number of seconds. - * @param milliseconds - Number of milliseconds. * @hidden + * Class which specifies the ChangeFeedIterator to start reading changes from a saved point. 
*/ -class TimeSpan { - constructor(days, hours, minutes, seconds, milliseconds) { - // Constructor - if (!Number.isInteger(days)) { - throw new Error("days is not an integer"); - } - if (!Number.isInteger(hours)) { - throw new Error("hours is not an integer"); - } - if (!Number.isInteger(minutes)) { - throw new Error("minutes is not an integer"); - } - if (!Number.isInteger(seconds)) { - throw new Error("seconds is not an integer"); +class ChangeFeedStartFromContinuation { + constructor(continuation) { + this.continuationToken = continuation; + } + getCfResource() { + return this.continuationToken; + } + getCfResourceJson() { + return JSON.parse(this.continuationToken); + } + getResourceType() { + const cToken = this.getCfResourceJson(); + if (Object.prototype.hasOwnProperty.call(cToken, "partitionKey") && + Object.prototype.hasOwnProperty.call(cToken, "Continuation") && + typeof cToken.Continuation === "string") { + return ChangeFeedResourceType.PartitionKey; } - if (!Number.isInteger(milliseconds)) { - throw new Error("milliseconds is not an integer"); + else if (Object.prototype.hasOwnProperty.call(cToken, "Continuation") && + Array.isArray(cToken.Continuation) && + cToken.Continuation.length > 0) { + return ChangeFeedResourceType.FeedRange; } - const totalMilliSeconds = (days * 3600 * 24 + hours * 3600 + minutes * 60 + seconds) * 1000 + milliseconds; - if (totalMilliSeconds > maxMilliSeconds || totalMilliSeconds < minMilliSeconds) { - throw new Error("Total number of milliseconds was either too large or too small"); + else { + throw new ErrorResponse("Invalid continuation token."); } - this._ticks = totalMilliSeconds * ticksPerMillisecond; } +} + +/** + * Base class for where to start a ChangeFeedIterator. + */ +/* eslint-disable @typescript-eslint/no-extraneous-class */ +class ChangeFeedStartFrom { /** - * Returns a new TimeSpan object whose value is the sum of the specified TimeSpan object and this instance. - * @param ts - The time interval to add. + * Returns an object that tells the ChangeFeedIterator to start from the beginning of time. + * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container. */ - add(ts) { - if (TimeSpan.additionDoesOverflow(this._ticks, ts._ticks)) { - throw new Error("Adding the two timestamps causes an overflow."); - } - const results = this._ticks + ts._ticks; - return TimeSpan.fromTicks(results); + static Beginning(cfResource) { + return new ChangeFeedStartFromBeginning(cfResource); } /** - * Returns a new TimeSpan object whose value is the difference of the specified TimeSpan object and this instance. - * @param ts - The time interval to subtract. - */ - subtract(ts) { - if (TimeSpan.subtractionDoesUnderflow(this._ticks, ts._ticks)) { - throw new Error("Subtracting the two timestamps causes an underflow."); - } - const results = this._ticks - ts._ticks; - return TimeSpan.fromTicks(results); + * Returns an object that tells the ChangeFeedIterator to start reading changes from this moment onward. + * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container. + **/ + static Now(cfResource) { + return new ChangeFeedStartFromNow(cfResource); } /** - * Compares this instance to a specified object and returns an integer that indicates whether this - * instance is shorter than, equal to, or longer than the specified object. - * @param value - The time interval to add. 
+ * Returns an object that tells the ChangeFeedIterator to start reading changes from some point in time onward. + * @param startTime - Date object specfiying the time to start reading changes from. + * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container. */ - compareTo(value) { - if (value == null) { - return 1; + static Time(startTime, cfResource) { + if (!startTime) { + throw new ErrorResponse("startTime must be present"); } - if (!TimeSpan.isTimeSpan(value)) { - throw new Error("Argument must be a TimeSpan object"); + if (startTime instanceof Date === true) { + return new ChangeFeedStartFromTime(startTime, cfResource); } - return TimeSpan.compare(this, value); - } - /** - * Returns a new TimeSpan object whose value is the absolute value of the current TimeSpan object. - */ - duration() { - return TimeSpan.fromTicks(this._ticks >= 0 ? this._ticks : -this._ticks); - } - /** - * Returns a value indicating whether this instance is equal to a specified object. - * @param value - The time interval to check for equality. - */ - equals(value) { - if (TimeSpan.isTimeSpan(value)) { - return this._ticks === value._ticks; + else { + throw new ErrorResponse("startTime must be a Date object."); } - return false; } /** - * Returns a new TimeSpan object whose value is the negated value of this instance. - * @param value - The time interval to check for equality. + * Returns an object that tells the ChangeFeedIterator to start reading changes from a save point. + * @param continuation - The continuation to resume from. */ - negate() { - return TimeSpan.fromTicks(-this._ticks); - } - days() { - return Math.floor(this._ticks / ticksPerDay); - } - hours() { - return Math.floor(this._ticks / ticksPerHour); - } - milliseconds() { - return Math.floor(this._ticks / ticksPerMillisecond); - } - seconds() { - return Math.floor(this._ticks / ticksPerSecond); - } - ticks() { - return this._ticks; - } - totalDays() { - return this._ticks * daysPerTick; - } - totalHours() { - return this._ticks * hoursPerTick; - } - totalMilliseconds() { - return this._ticks * millisecondsPerTick; - } - totalMinutes() { - return this._ticks * minutesPerTick; - } - totalSeconds() { - return this._ticks * secondsPerTick; - } - static fromTicks(value) { - const timeSpan = new TimeSpan(0, 0, 0, 0, 0); - timeSpan._ticks = value; - return timeSpan; - } - static isTimeSpan(timespan) { - return timespan._ticks; - } - static additionDoesOverflow(a, b) { - const c = a + b; - return a !== c - b || b !== c - a; + static Continuation(continuationToken) { + if (!continuationToken) { + throw new ErrorResponse("Argument continuation must be passed."); + } + if (isNullOrEmpty(continuationToken)) { + throw new ErrorResponse("Argument continuationToken must be a non-empty string."); + } + return new ChangeFeedStartFromContinuation(continuationToken); } - static subtractionDoesUnderflow(a, b) { - const c = a - b; - return a !== c + b || b !== a - c; +} + +function changeFeedIteratorBuilder(cfOptions, clientContext, container, partitionKeyRangeCache) { + const url = container.url; + const path = getPathFromLink(url, exports.ResourceType.item); + const id = getIdFromLink(url); + let changeFeedStartFrom = cfOptions.changeFeedStartFrom; + if (changeFeedStartFrom === undefined) { + changeFeedStartFrom = ChangeFeedStartFrom.Now(); } - static compare(t1, t2) { - if (t1._ticks > t2._ticks) { - return 1; + if (changeFeedStartFrom instanceof ChangeFeedStartFromContinuation) { + 
const continuationToken = changeFeedStartFrom.getCfResourceJson(); + const resourceType = changeFeedStartFrom.getResourceType(); + const internalCfOptions = buildInternalChangeFeedOptions(cfOptions, changeFeedStartFrom.getCfResource()); + if (resourceType === ChangeFeedResourceType.PartitionKey && + isPartitionKey(continuationToken.partitionKey)) { + return new ChangeFeedForPartitionKey(clientContext, container, id, path, continuationToken.partitionKey, internalCfOptions); } - if (t1._ticks < t2._ticks) { - return -1; + else if (resourceType === ChangeFeedResourceType.FeedRange) { + return new ChangeFeedForEpkRange(clientContext, container, partitionKeyRangeCache, id, path, url, internalCfOptions, undefined); + } + else { + throw new ErrorResponse("Invalid continuation token."); } - return 0; } - static interval(value, scale) { - if (isNaN(value)) { - throw new Error("value must be a number"); + else if (changeFeedStartFrom instanceof ChangeFeedStartFromNow || + changeFeedStartFrom instanceof ChangeFeedStartFromTime || + changeFeedStartFrom instanceof ChangeFeedStartFromBeginning) { + const startTime = fetchStartTime(changeFeedStartFrom); + const internalCfOptions = buildInternalChangeFeedOptions(cfOptions, undefined, startTime); + const cfResource = changeFeedStartFrom.getCfResource(); + if (isPartitionKey(cfResource)) { + return new ChangeFeedForPartitionKey(clientContext, container, id, path, cfResource, internalCfOptions); } - const milliseconds = value * scale; - if (milliseconds > maxMilliSeconds || milliseconds < minMilliSeconds) { - throw new Error("timespan too long"); + else { + let internalCfResource; + if (cfResource === undefined) { + internalCfResource = new QueryRange(Constants$1.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey, Constants$1.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey, true, false); + } + else if (isEpkRange(cfResource)) { + internalCfResource = new QueryRange(cfResource.minInclusive, cfResource.maxExclusive, true, false); + } + else { + throw new ErrorResponse("Invalid feed range."); + } + return new ChangeFeedForEpkRange(clientContext, container, partitionKeyRangeCache, id, path, url, internalCfOptions, internalCfResource); } - return TimeSpan.fromTicks(Math.floor(milliseconds * ticksPerMillisecond)); - } - static fromMilliseconds(value) { - return TimeSpan.interval(value, 1); - } - static fromSeconds(value) { - return TimeSpan.interval(value, millisPerSecond); - } - static fromMinutes(value) { - return TimeSpan.interval(value, millisPerMinute); - } - static fromHours(value) { - return TimeSpan.interval(value, millisPerHour); } - static fromDays(value) { - return TimeSpan.interval(value, millisPerDay); + else { + throw new ErrorResponse("Invalid change feed start location."); } } -TimeSpan.zero = new TimeSpan(0, 0, 0, 0, 0); -TimeSpan.maxValue = TimeSpan.fromTicks(Number.MAX_SAFE_INTEGER); -TimeSpan.minValue = TimeSpan.fromTicks(Number.MIN_SAFE_INTEGER); // Copyright (c) Microsoft Corporation. 
+const uuid$1 = uuid$3.v4; /** * @hidden */ -function parseDelimitedString(delimitedString) { - if (delimitedString == null) { - throw new Error("delimitedString is null or undefined"); - } - const metrics = {}; - const headerAttributes = delimitedString.split(";"); - for (const attribute of headerAttributes) { - const attributeKeyValue = attribute.split("="); - if (attributeKeyValue.length !== 2) { - throw new Error("recieved a malformed delimited string"); - } - const attributeKey = attributeKeyValue[0]; - const attributeValue = parseFloat(attributeKeyValue[1]); - metrics[attributeKey] = attributeValue; - } - return metrics; +function isChangeFeedOptions(options) { + return options && !(isPrimitivePartitionKeyValue(options) || Array.isArray(options)); } /** - * @hidden + * Operations for creating new items, and reading/querying all items + * + * @see {@link Item} for reading, replacing, or deleting an existing container; use `.item(id)`. */ -function timeSpanFromMetrics(metrics /* TODO: any */, key) { - if (key in metrics) { - return TimeSpan.fromMilliseconds(metrics[key]); - } - return TimeSpan.zero; -} - -// Copyright (c) Microsoft Corporation. -class QueryPreparationTimes { - constructor(queryCompilationTime, logicalPlanBuildTime, physicalPlanBuildTime, queryOptimizationTime) { - this.queryCompilationTime = queryCompilationTime; - this.logicalPlanBuildTime = logicalPlanBuildTime; - this.physicalPlanBuildTime = physicalPlanBuildTime; - this.queryOptimizationTime = queryOptimizationTime; - } +class Items { /** - * returns a new QueryPreparationTimes instance that is the addition of this and the arguments. + * Create an instance of {@link Items} linked to the parent {@link Container}. + * @param container - The parent container. + * @hidden */ - add(...queryPreparationTimesArray) { - let queryCompilationTime = this.queryCompilationTime; - let logicalPlanBuildTime = this.logicalPlanBuildTime; - let physicalPlanBuildTime = this.physicalPlanBuildTime; - let queryOptimizationTime = this.queryOptimizationTime; - for (const queryPreparationTimes of queryPreparationTimesArray) { - if (queryPreparationTimes == null) { - throw new Error("queryPreparationTimesArray has null or undefined item(s)"); - } - queryCompilationTime = queryCompilationTime.add(queryPreparationTimes.queryCompilationTime); - logicalPlanBuildTime = logicalPlanBuildTime.add(queryPreparationTimes.logicalPlanBuildTime); - physicalPlanBuildTime = physicalPlanBuildTime.add(queryPreparationTimes.physicalPlanBuildTime); - queryOptimizationTime = queryOptimizationTime.add(queryPreparationTimes.queryOptimizationTime); - } - return new QueryPreparationTimes(queryCompilationTime, logicalPlanBuildTime, physicalPlanBuildTime, queryOptimizationTime); + constructor(container, clientContext) { + this.container = container; + this.clientContext = clientContext; + this.partitionKeyRangeCache = new PartitionKeyRangeCache(this.clientContext); } - /** - * Output the QueryPreparationTimes as a delimited string. 
- */ - toDelimitedString() { - return (`${QueryMetricsConstants.QueryCompileTimeInMs}=${this.queryCompilationTime.totalMilliseconds()};` + - `${QueryMetricsConstants.LogicalPlanBuildTimeInMs}=${this.logicalPlanBuildTime.totalMilliseconds()};` + - `${QueryMetricsConstants.PhysicalPlanBuildTimeInMs}=${this.physicalPlanBuildTime.totalMilliseconds()};` + - `${QueryMetricsConstants.QueryOptimizationTimeInMs}=${this.queryOptimizationTime.totalMilliseconds()}`); + query(query, options = {}) { + const path = getPathFromLink(this.container.url, exports.ResourceType.item); + const id = getIdFromLink(this.container.url); + const fetchFunction = async (diagnosticNode, innerOptions) => { + const response = await this.clientContext.queryFeed({ + path, + resourceType: exports.ResourceType.item, + resourceId: id, + resultFn: (result) => (result ? result.Documents : []), + query, + options: innerOptions, + partitionKey: options.partitionKey, + diagnosticNode, + }); + return response; + }; + return new QueryIterator(this.clientContext, query, options, fetchFunction, this.container.url, exports.ResourceType.item); } - /** - * Returns a new instance of the QueryPreparationTimes class that is the - * aggregation of an array of QueryPreparationTimes. - */ - static createFromArray(queryPreparationTimesArray) { - if (queryPreparationTimesArray == null) { - throw new Error("queryPreparationTimesArray is null or undefined item(s)"); + readChangeFeed(partitionKeyOrChangeFeedOptions, changeFeedOptions) { + if (isChangeFeedOptions(partitionKeyOrChangeFeedOptions)) { + return this.changeFeed(partitionKeyOrChangeFeedOptions); } - return QueryPreparationTimes.zero.add(...queryPreparationTimesArray); + else { + return this.changeFeed(partitionKeyOrChangeFeedOptions, changeFeedOptions); + } + } + changeFeed(partitionKeyOrChangeFeedOptions, changeFeedOptions) { + let partitionKey; + if (!changeFeedOptions && isChangeFeedOptions(partitionKeyOrChangeFeedOptions)) { + partitionKey = undefined; + changeFeedOptions = partitionKeyOrChangeFeedOptions; + } + else if (partitionKeyOrChangeFeedOptions !== undefined && + !isChangeFeedOptions(partitionKeyOrChangeFeedOptions)) { + partitionKey = partitionKeyOrChangeFeedOptions; + } + if (!changeFeedOptions) { + changeFeedOptions = {}; + } + const path = getPathFromLink(this.container.url, exports.ResourceType.item); + const id = getIdFromLink(this.container.url); + return new ChangeFeedIterator(this.clientContext, id, path, partitionKey, changeFeedOptions); } /** - * Returns a new instance of the QueryPreparationTimes class this is deserialized from a delimited string. + * Returns an iterator to iterate over pages of changes. The iterator returned can be used to fetch changes for a single partition key, feed range or an entire container. */ - static createFromDelimitedString(delimitedString) { - const metrics = parseDelimitedString(delimitedString); - return new QueryPreparationTimes(timeSpanFromMetrics(metrics, QueryMetricsConstants.QueryCompileTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.LogicalPlanBuildTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.PhysicalPlanBuildTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.QueryOptimizationTimeInMs)); + getChangeFeedIterator(changeFeedIteratorOptions) { + const cfOptions = changeFeedIteratorOptions !== undefined ? 
changeFeedIteratorOptions : {}; + validateChangeFeedIteratorOptions(cfOptions); + const iterator = changeFeedIteratorBuilder(cfOptions, this.clientContext, this.container, this.partitionKeyRangeCache); + return iterator; } -} -QueryPreparationTimes.zero = new QueryPreparationTimes(TimeSpan.zero, TimeSpan.zero, TimeSpan.zero, TimeSpan.zero); - -// Copyright (c) Microsoft Corporation. -class RuntimeExecutionTimes { - constructor(queryEngineExecutionTime, systemFunctionExecutionTime, userDefinedFunctionExecutionTime) { - this.queryEngineExecutionTime = queryEngineExecutionTime; - this.systemFunctionExecutionTime = systemFunctionExecutionTime; - this.userDefinedFunctionExecutionTime = userDefinedFunctionExecutionTime; + readAll(options) { + return this.query("SELECT * from c", options); } /** - * returns a new RuntimeExecutionTimes instance that is the addition of this and the arguments. + * Create an item. + * + * Any provided type, T, is not necessarily enforced by the SDK. + * You may get more or less properties and it's up to your logic to enforce it. + * + * There is no set schema for JSON items. They may contain any number of custom properties. + * + * @param body - Represents the body of the item. Can contain any number of user defined properties. + * @param options - Used for modifying the request (for instance, specifying the partition key). */ - add(...runtimeExecutionTimesArray) { - let queryEngineExecutionTime = this.queryEngineExecutionTime; - let systemFunctionExecutionTime = this.systemFunctionExecutionTime; - let userDefinedFunctionExecutionTime = this.userDefinedFunctionExecutionTime; - for (const runtimeExecutionTimes of runtimeExecutionTimesArray) { - if (runtimeExecutionTimes == null) { - throw new Error("runtimeExecutionTimes has null or undefined item(s)"); + async create(body, options = {}) { + // Generate random document id if the id is missing in the payload and + // options.disableAutomaticIdGeneration != true + return withDiagnostics(async (diagnosticNode) => { + if ((body.id === undefined || body.id === "") && !options.disableAutomaticIdGeneration) { + body.id = uuid$1(); } - queryEngineExecutionTime = queryEngineExecutionTime.add(runtimeExecutionTimes.queryEngineExecutionTime); - systemFunctionExecutionTime = systemFunctionExecutionTime.add(runtimeExecutionTimes.systemFunctionExecutionTime); - userDefinedFunctionExecutionTime = userDefinedFunctionExecutionTime.add(runtimeExecutionTimes.userDefinedFunctionExecutionTime); - } - return new RuntimeExecutionTimes(queryEngineExecutionTime, systemFunctionExecutionTime, userDefinedFunctionExecutionTime); + const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); + const partitionKey = extractPartitionKeys(body, partitionKeyDefinition); + const err = {}; + if (!isItemResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.container.url, exports.ResourceType.item); + const id = getIdFromLink(this.container.url); + const response = await this.clientContext.create({ + body, + path, + resourceType: exports.ResourceType.item, + resourceId: id, + diagnosticNode, + options, + partitionKey, + }); + const ref = new Item(this.container, response.result.id, this.clientContext, partitionKey); + return new ItemResponse(response.result, response.headers, response.code, response.substatus, ref, getEmptyCosmosDiagnostics()); + }, this.clientContext); + } + async upsert(body, options = {}) { + return withDiagnostics(async (diagnosticNode) => { + // Generate random document id 
if the id is missing in the payload and + // options.disableAutomaticIdGeneration != true + if ((body.id === undefined || body.id === "") && !options.disableAutomaticIdGeneration) { + body.id = uuid$1(); + } + const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); + const partitionKey = extractPartitionKeys(body, partitionKeyDefinition); + const err = {}; + if (!isItemResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.container.url, exports.ResourceType.item); + const id = getIdFromLink(this.container.url); + const response = await this.clientContext.upsert({ + body, + path, + resourceType: exports.ResourceType.item, + resourceId: id, + options, + partitionKey, + diagnosticNode, + }); + const ref = new Item(this.container, response.result.id, this.clientContext, partitionKey); + return new ItemResponse(response.result, response.headers, response.code, response.substatus, ref, getEmptyCosmosDiagnostics()); + }, this.clientContext); } /** - * Output the RuntimeExecutionTimes as a delimited string. + * Execute bulk operations on items. + * + * Bulk takes an array of Operations which are typed based on what the operation does. + * The choices are: Create, Upsert, Read, Replace, and Delete + * + * Usage example: + * ```typescript + * // partitionKey is optional at the top level if present in the resourceBody + * const operations: OperationInput[] = [ + * { + * operationType: "Create", + * resourceBody: { id: "doc1", name: "sample", key: "A" } + * }, + * { + * operationType: "Upsert", + * partitionKey: 'A', + * resourceBody: { id: "doc2", name: "other", key: "A" } + * } + * ] + * + * await database.container.items.bulk(operations) + * ``` + * + * @param operations - List of operations. Limit 100 + * @param bulkOptions - Optional options object to modify bulk behavior. Pass \{ continueOnError: true \} to continue executing operations when one fails. (Defaults to false) ** NOTE: THIS WILL DEFAULT TO TRUE IN THE 4.0 RELEASE + * @param options - Used for modifying the request. 
*/ - toDelimitedString() { - return (`${QueryMetricsConstants.SystemFunctionExecuteTimeInMs}=${this.systemFunctionExecutionTime.totalMilliseconds()};` + - `${QueryMetricsConstants.UserDefinedFunctionExecutionTimeInMs}=${this.userDefinedFunctionExecutionTime.totalMilliseconds()}`); + async bulk(operations, bulkOptions, options) { + return withDiagnostics(async (diagnosticNode) => { + const { resources: partitionKeyRanges } = await this.container + .readPartitionKeyRanges() + .fetchAll(); + const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); + const batches = partitionKeyRanges.map((keyRange) => { + return { + min: keyRange.minInclusive, + max: keyRange.maxExclusive, + rangeId: keyRange.id, + indexes: [], + operations: [], + }; + }); + this.groupOperationsBasedOnPartitionKey(operations, partitionKeyDefinition, options, batches); + const path = getPathFromLink(this.container.url, exports.ResourceType.item); + const orderedResponses = []; + await Promise.all(batches + .filter((batch) => batch.operations.length) + .flatMap((batch) => splitBatchBasedOnBodySize(batch)) + .map(async (batch) => { + if (batch.operations.length > 100) { + throw new Error("Cannot run bulk request with more than 100 operations per partition"); + } + try { + const response = await addDignosticChild(async (childNode) => this.clientContext.bulk({ + body: batch.operations, + partitionKeyRangeId: batch.rangeId, + path, + resourceId: this.container.url, + bulkOptions, + options, + diagnosticNode: childNode, + }), diagnosticNode, exports.DiagnosticNodeType.BATCH_REQUEST); + response.result.forEach((operationResponse, index) => { + orderedResponses[batch.indexes[index]] = operationResponse; + }); + } + catch (err) { + // In the case of 410 errors, we need to recompute the partition key ranges + // and redo the batch request, however, 410 errors occur for unsupported + // partition key types as well since we don't support them, so for now we throw + if (err.code === 410) { + throw new Error("Partition key error. Either the partitions have split or an operation has an unsupported partitionKey type" + + err.message); + } + throw new Error(`Bulk request errored with: ${err.message}`); + } + })); + const response = orderedResponses; + response.diagnostics = diagnosticNode.toDiagnostic(this.clientContext.getClientConfig()); + return response; + }, this.clientContext); } /** - * Returns a new instance of the RuntimeExecutionTimes class that is - * the aggregation of an array of RuntimeExecutionTimes. + * Function to create batches based of partition key Ranges. + * @param operations - operations to group + * @param partitionDefinition - PartitionKey definition of container. + * @param options - Request options for bulk request. + * @param batches - Groups to be filled with operations. 
*/ - static createFromArray(runtimeExecutionTimesArray) { - if (runtimeExecutionTimesArray == null) { - throw new Error("runtimeExecutionTimesArray is null or undefined item(s)"); - } - return RuntimeExecutionTimes.zero.add(...runtimeExecutionTimesArray); + groupOperationsBasedOnPartitionKey(operations, partitionDefinition, options, batches) { + operations.forEach((operationInput, index) => { + const { operation, partitionKey } = prepareOperations(operationInput, partitionDefinition, options); + const hashed = hashPartitionKey(assertNotUndefined(partitionKey, "undefined value for PartitionKey is not expected during grouping of bulk operations."), partitionDefinition); + const batchForKey = assertNotUndefined(batches.find((batch) => { + return isKeyInRange(batch.min, batch.max, hashed); + }), "No suitable Batch found."); + batchForKey.operations.push(operation); + batchForKey.indexes.push(index); + }); } /** - * Returns a new instance of the RuntimeExecutionTimes class this is deserialized from a delimited string. + * Execute transactional batch operations on items. + * + * Batch takes an array of Operations which are typed based on what the operation does. Batch is transactional and will rollback all operations if one fails. + * The choices are: Create, Upsert, Read, Replace, and Delete + * + * Usage example: + * ```typescript + * // partitionKey is required as a second argument to batch, but defaults to the default partition key + * const operations: OperationInput[] = [ + * { + * operationType: "Create", + * resourceBody: { id: "doc1", name: "sample", key: "A" } + * }, + * { + * operationType: "Upsert", + * partitionKey: 'A', + * resourceBody: { id: "doc2", name: "other", key: "A" } + * } + * ] + * + * await database.container.items.batch(operations) + * ``` + * + * @param operations - List of operations. 
Limit 100 + * @param options - Used for modifying the request */ - static createFromDelimitedString(delimitedString) { - const metrics = parseDelimitedString(delimitedString); - const vmExecutionTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.VMExecutionTimeInMs); - const indexLookupTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.IndexLookupTimeInMs); - const documentLoadTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentLoadTimeInMs); - const documentWriteTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentWriteTimeInMs); - let queryEngineExecutionTime = TimeSpan.zero; - queryEngineExecutionTime = queryEngineExecutionTime.add(vmExecutionTime); - queryEngineExecutionTime = queryEngineExecutionTime.subtract(indexLookupTime); - queryEngineExecutionTime = queryEngineExecutionTime.subtract(documentLoadTime); - queryEngineExecutionTime = queryEngineExecutionTime.subtract(documentWriteTime); - return new RuntimeExecutionTimes(queryEngineExecutionTime, timeSpanFromMetrics(metrics, QueryMetricsConstants.SystemFunctionExecuteTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.UserDefinedFunctionExecutionTimeInMs)); + async batch(operations, partitionKey, options) { + return withDiagnostics(async (diagnosticNode) => { + operations.map((operation) => decorateBatchOperation(operation, options)); + const path = getPathFromLink(this.container.url, exports.ResourceType.item); + if (operations.length > 100) { + throw new Error("Cannot run batch request with more than 100 operations per partition"); + } + try { + const response = await this.clientContext.batch({ + body: operations, + partitionKey, + path, + resourceId: this.container.url, + options, + diagnosticNode, + }); + return response; + } + catch (err) { + throw new Error(`Batch request error: ${err.message}`); + } + }, this.clientContext); } } -RuntimeExecutionTimes.zero = new RuntimeExecutionTimes(TimeSpan.zero, TimeSpan.zero, TimeSpan.zero); -// Copyright (c) Microsoft Corporation. -class QueryMetrics { - constructor(retrievedDocumentCount, retrievedDocumentSize, outputDocumentCount, outputDocumentSize, indexHitDocumentCount, totalQueryExecutionTime, queryPreparationTimes, indexLookupTime, documentLoadTime, vmExecutionTime, runtimeExecutionTimes, documentWriteTime, clientSideMetrics) { - this.retrievedDocumentCount = retrievedDocumentCount; - this.retrievedDocumentSize = retrievedDocumentSize; - this.outputDocumentCount = outputDocumentCount; - this.outputDocumentSize = outputDocumentSize; - this.indexHitDocumentCount = indexHitDocumentCount; - this.totalQueryExecutionTime = totalQueryExecutionTime; - this.queryPreparationTimes = queryPreparationTimes; - this.indexLookupTime = indexLookupTime; - this.documentLoadTime = documentLoadTime; - this.vmExecutionTime = vmExecutionTime; - this.runtimeExecutionTimes = runtimeExecutionTimes; - this.documentWriteTime = documentWriteTime; - this.clientSideMetrics = clientSideMetrics; +class StoredProcedureResponse extends ResourceResponse { + constructor(resource, headers, statusCode, storedProcedure, diagnostics) { + super(resource, headers, statusCode, diagnostics); + this.storedProcedure = storedProcedure; } /** - * Gets the IndexHitRatio - * @hidden + * Alias for storedProcedure. + * + * A reference to the {@link StoredProcedure} which the {@link StoredProcedureDefinition} corresponds to. */ - get indexHitRatio() { - return this.retrievedDocumentCount === 0 - ? 
1 - : this.indexHitDocumentCount / this.retrievedDocumentCount; + get sproc() { + return this.storedProcedure; } +} + +/** + * Operations for reading, replacing, deleting, or executing a specific, existing stored procedure by id. + * + * For operations to create, read all, or query Stored Procedures, + */ +class StoredProcedure { /** - * returns a new QueryMetrics instance that is the addition of this and the arguments. + * Returns a reference URL to the resource. Used for linking in Permissions. */ - add(queryMetricsArray) { - let retrievedDocumentCount = 0; - let retrievedDocumentSize = 0; - let outputDocumentCount = 0; - let outputDocumentSize = 0; - let indexHitDocumentCount = 0; - let totalQueryExecutionTime = TimeSpan.zero; - const queryPreparationTimesArray = []; - let indexLookupTime = TimeSpan.zero; - let documentLoadTime = TimeSpan.zero; - let vmExecutionTime = TimeSpan.zero; - const runtimeExecutionTimesArray = []; - let documentWriteTime = TimeSpan.zero; - const clientSideQueryMetricsArray = []; - queryMetricsArray.push(this); - for (const queryMetrics of queryMetricsArray) { - if (queryMetrics) { - retrievedDocumentCount += queryMetrics.retrievedDocumentCount; - retrievedDocumentSize += queryMetrics.retrievedDocumentSize; - outputDocumentCount += queryMetrics.outputDocumentCount; - outputDocumentSize += queryMetrics.outputDocumentSize; - indexHitDocumentCount += queryMetrics.indexHitDocumentCount; - totalQueryExecutionTime = totalQueryExecutionTime.add(queryMetrics.totalQueryExecutionTime); - queryPreparationTimesArray.push(queryMetrics.queryPreparationTimes); - indexLookupTime = indexLookupTime.add(queryMetrics.indexLookupTime); - documentLoadTime = documentLoadTime.add(queryMetrics.documentLoadTime); - vmExecutionTime = vmExecutionTime.add(queryMetrics.vmExecutionTime); - runtimeExecutionTimesArray.push(queryMetrics.runtimeExecutionTimes); - documentWriteTime = documentWriteTime.add(queryMetrics.documentWriteTime); - clientSideQueryMetricsArray.push(queryMetrics.clientSideMetrics); - } - } - return new QueryMetrics(retrievedDocumentCount, retrievedDocumentSize, outputDocumentCount, outputDocumentSize, indexHitDocumentCount, totalQueryExecutionTime, QueryPreparationTimes.createFromArray(queryPreparationTimesArray), indexLookupTime, documentLoadTime, vmExecutionTime, RuntimeExecutionTimes.createFromArray(runtimeExecutionTimesArray), documentWriteTime, ClientSideMetrics.createFromArray(...clientSideQueryMetricsArray)); + get url() { + return createStoredProcedureUri(this.container.database.id, this.container.id, this.id); } /** - * Output the QueryMetrics as a delimited string. + * Creates a new instance of {@link StoredProcedure} linked to the parent {@link Container}. + * @param container - The parent {@link Container}. + * @param id - The id of the given {@link StoredProcedure}. 
* @hidden */ - toDelimitedString() { - return (QueryMetricsConstants.RetrievedDocumentCount + - "=" + - this.retrievedDocumentCount + - ";" + - QueryMetricsConstants.RetrievedDocumentSize + - "=" + - this.retrievedDocumentSize + - ";" + - QueryMetricsConstants.OutputDocumentCount + - "=" + - this.outputDocumentCount + - ";" + - QueryMetricsConstants.OutputDocumentSize + - "=" + - this.outputDocumentSize + - ";" + - QueryMetricsConstants.IndexHitRatio + - "=" + - this.indexHitRatio + - ";" + - QueryMetricsConstants.TotalQueryExecutionTimeInMs + - "=" + - this.totalQueryExecutionTime.totalMilliseconds() + - ";" + - this.queryPreparationTimes.toDelimitedString() + - ";" + - QueryMetricsConstants.IndexLookupTimeInMs + - "=" + - this.indexLookupTime.totalMilliseconds() + - ";" + - QueryMetricsConstants.DocumentLoadTimeInMs + - "=" + - this.documentLoadTime.totalMilliseconds() + - ";" + - QueryMetricsConstants.VMExecutionTimeInMs + - "=" + - this.vmExecutionTime.totalMilliseconds() + - ";" + - this.runtimeExecutionTimes.toDelimitedString() + - ";" + - QueryMetricsConstants.DocumentWriteTimeInMs + - "=" + - this.documentWriteTime.totalMilliseconds()); + constructor(container, id, clientContext) { + this.container = container; + this.id = id; + this.clientContext = clientContext; } /** - * Returns a new instance of the QueryMetrics class that is the aggregation of an array of query metrics. + * Read the {@link StoredProcedureDefinition} for the given {@link StoredProcedure}. */ - static createFromArray(queryMetricsArray) { - if (!queryMetricsArray) { - throw new Error("queryMetricsArray is null or undefined item(s)"); - } - return QueryMetrics.zero.add(queryMetricsArray); + async read(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.read({ + path, + resourceType: exports.ResourceType.sproc, + resourceId: id, + options, + diagnosticNode, + }); + return new StoredProcedureResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } /** - * Returns a new instance of the QueryMetrics class this is deserialized from a delimited string. + * Replace the given {@link StoredProcedure} with the specified {@link StoredProcedureDefinition}. + * @param body - The specified {@link StoredProcedureDefinition} to replace the existing definition. 
*/ - static createFromDelimitedString(delimitedString, clientSideMetrics) { - const metrics = parseDelimitedString(delimitedString); - const indexHitRatio = metrics[QueryMetricsConstants.IndexHitRatio] || 0; - const retrievedDocumentCount = metrics[QueryMetricsConstants.RetrievedDocumentCount] || 0; - const indexHitCount = indexHitRatio * retrievedDocumentCount; - const outputDocumentCount = metrics[QueryMetricsConstants.OutputDocumentCount] || 0; - const outputDocumentSize = metrics[QueryMetricsConstants.OutputDocumentSize] || 0; - const retrievedDocumentSize = metrics[QueryMetricsConstants.RetrievedDocumentSize] || 0; - const totalQueryExecutionTime = timeSpanFromMetrics(metrics, QueryMetricsConstants.TotalQueryExecutionTimeInMs); - return new QueryMetrics(retrievedDocumentCount, retrievedDocumentSize, outputDocumentCount, outputDocumentSize, indexHitCount, totalQueryExecutionTime, QueryPreparationTimes.createFromDelimitedString(delimitedString), timeSpanFromMetrics(metrics, QueryMetricsConstants.IndexLookupTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentLoadTimeInMs), timeSpanFromMetrics(metrics, QueryMetricsConstants.VMExecutionTimeInMs), RuntimeExecutionTimes.createFromDelimitedString(delimitedString), timeSpanFromMetrics(metrics, QueryMetricsConstants.DocumentWriteTimeInMs), clientSideMetrics || ClientSideMetrics.zero); - } -} -QueryMetrics.zero = new QueryMetrics(0, 0, 0, 0, 0, TimeSpan.zero, QueryPreparationTimes.zero, TimeSpan.zero, TimeSpan.zero, TimeSpan.zero, RuntimeExecutionTimes.zero, TimeSpan.zero, ClientSideMetrics.zero); - -// Copyright (c) Microsoft Corporation. -/** @hidden */ -// TODO: docs -function getRequestChargeIfAny(headers) { - if (typeof headers === "number") { - return headers; - } - else if (typeof headers === "string") { - return parseFloat(headers); + async replace(body, options) { + return withDiagnostics(async (diagnosticNode) => { + if (body.body) { + body.body = body.body.toString(); + } + const err = {}; + if (!isResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.replace({ + body, + path, + resourceType: exports.ResourceType.sproc, + resourceId: id, + options, + diagnosticNode, + }); + return new StoredProcedureResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } - if (headers) { - const rc = headers[Constants$1.HttpHeaders.RequestCharge]; - if (rc) { - return parseFloat(rc); - } - else { - return 0; - } + /** + * Delete the given {@link StoredProcedure}. + */ + async delete(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.sproc, + resourceId: id, + options, + diagnosticNode, + }); + return new StoredProcedureResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } - else { - return 0; + /** + * Execute the given {@link StoredProcedure}. + * + * The specified type, T, is not enforced by the client. + * Be sure to validate the response from the stored procedure matches the type, T, you provide. + * + * @param partitionKey - The partition key to use when executing the stored procedure + * @param params - Array of parameters to pass as arguments to the given {@link StoredProcedure}. 
+ * @param options - Additional options, such as the partition key to invoke the {@link StoredProcedure} on. + */ + async execute(partitionKey, params, options) { + return withDiagnostics(async (diagnosticNode) => { + if (partitionKey === undefined) { + const partitionKeyResponse = await readPartitionKeyDefinition(diagnosticNode, this.container); + partitionKey = undefinedPartitionKey(partitionKeyResponse); + } + const response = await this.clientContext.execute({ + sprocLink: this.url, + params, + options, + partitionKey, + diagnosticNode, + }); + return new ResourceResponse(response.result, response.headers, response.code, getEmptyCosmosDiagnostics()); + }, this.clientContext); } } + /** - * @hidden - */ -function getInitialHeader() { - const headers = {}; - headers[Constants$1.HttpHeaders.RequestCharge] = 0; - headers[Constants$1.HttpHeaders.QueryMetrics] = {}; - return headers; -} -/** - * @hidden + * Operations for creating, upserting, or reading/querying all Stored Procedures. + * + * For operations to read, replace, delete, or execute a specific, existing stored procedure by id, see `container.storedProcedure()`. */ -// TODO: The name of this method isn't very accurate to what it does -function mergeHeaders(headers, toBeMergedHeaders) { - if (headers[Constants$1.HttpHeaders.RequestCharge] === undefined) { - headers[Constants$1.HttpHeaders.RequestCharge] = 0; - } - if (headers[Constants$1.HttpHeaders.QueryMetrics] === undefined) { - headers[Constants$1.HttpHeaders.QueryMetrics] = QueryMetrics.zero; +class StoredProcedures { + /** + * @param container - The parent {@link Container}. + * @hidden + */ + constructor(container, clientContext) { + this.container = container; + this.clientContext = clientContext; } - if (!toBeMergedHeaders) { - return; + query(query, options) { + const path = getPathFromLink(this.container.url, exports.ResourceType.sproc); + const id = getIdFromLink(this.container.url); + return new QueryIterator(this.clientContext, query, options, (diagNode, innerOptions) => { + return this.clientContext.queryFeed({ + path, + resourceType: exports.ResourceType.sproc, + resourceId: id, + resultFn: (result) => result.StoredProcedures, + query, + options: innerOptions, + diagnosticNode: diagNode, + }); + }); } - headers[Constants$1.HttpHeaders.RequestCharge] += getRequestChargeIfAny(toBeMergedHeaders); - if (toBeMergedHeaders[Constants$1.HttpHeaders.IsRUPerMinuteUsed]) { - headers[Constants$1.HttpHeaders.IsRUPerMinuteUsed] = - toBeMergedHeaders[Constants$1.HttpHeaders.IsRUPerMinuteUsed]; + /** + * Read all stored procedures. + * @example Read all stored procedures to array. + * ```typescript + * const {body: sprocList} = await containers.storedProcedures.readAll().fetchAll(); + * ``` + */ + readAll(options) { + return this.query(undefined, options); } - if (Constants$1.HttpHeaders.QueryMetrics in toBeMergedHeaders) { - const headerQueryMetrics = headers[Constants$1.HttpHeaders.QueryMetrics]; - const toBeMergedHeaderQueryMetrics = toBeMergedHeaders[Constants$1.HttpHeaders.QueryMetrics]; - for (const partitionId in toBeMergedHeaderQueryMetrics) { - if (headerQueryMetrics[partitionId]) { - const combinedQueryMetrics = headerQueryMetrics[partitionId].add([ - toBeMergedHeaderQueryMetrics[partitionId], - ]); - headerQueryMetrics[partitionId] = combinedQueryMetrics; + /** + * Create a StoredProcedure. + * + * Azure Cosmos DB allows stored procedures to be executed in the storage tier, + * directly against an item container. 
The script + * gets executed under ACID transactions on the primary storage partition of the + * specified container. For additional details, + * refer to the server-side JavaScript API documentation. + */ + async create(body, options) { + return withDiagnostics(async (diagnosticNode) => { + if (body.body) { + body.body = body.body.toString(); } - else { - headerQueryMetrics[partitionId] = toBeMergedHeaderQueryMetrics[partitionId]; + const err = {}; + if (!isResourceValid(body, err)) { + throw err; } - } - } - if (Constants$1.HttpHeaders.IndexUtilization in toBeMergedHeaders) { - headers[Constants$1.HttpHeaders.IndexUtilization] = - toBeMergedHeaders[Constants$1.HttpHeaders.IndexUtilization]; + const path = getPathFromLink(this.container.url, exports.ResourceType.sproc); + const id = getIdFromLink(this.container.url); + const response = await this.clientContext.create({ + body, + path, + resourceType: exports.ResourceType.sproc, + resourceId: id, + options, + diagnosticNode, + }); + const ref = new StoredProcedure(this.container, response.result.id, this.clientContext); + return new StoredProcedureResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); + }, this.clientContext); } } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -class IndexUtilizationInfo { - constructor(UtilizedSingleIndexes, PotentialSingleIndexes, UtilizedCompositeIndexes, PotentialCompositeIndexes) { - this.UtilizedSingleIndexes = UtilizedSingleIndexes; - this.PotentialSingleIndexes = PotentialSingleIndexes; - this.UtilizedCompositeIndexes = UtilizedCompositeIndexes; - this.PotentialCompositeIndexes = PotentialCompositeIndexes; - } - static tryCreateFromDelimitedBase64String(delimitedString, out) { - if (delimitedString == null) { - out.result = IndexUtilizationInfo.Empty; - return false; - } - return IndexUtilizationInfo.tryCreateFromDelimitedString(Buffer.from(delimitedString, "base64").toString(), out); - } - static tryCreateFromDelimitedString(delimitedString, out) { - if (delimitedString == null) { - out.result = IndexUtilizationInfo.Empty; - return false; - } - try { - out.result = JSON.parse(delimitedString) || IndexUtilizationInfo.Empty; - return true; - } - catch (error) { - out.result = IndexUtilizationInfo.Empty; - return false; - } - } - static createFromString(delimitedString, isBase64Encoded) { - var _a; - const result = { result: undefined }; - if (isBase64Encoded) { - IndexUtilizationInfo.tryCreateFromDelimitedBase64String(delimitedString, result); - } - else { - IndexUtilizationInfo.tryCreateFromDelimitedString(delimitedString, result); - } - return (_a = result.result) !== null && _a !== void 0 ? _a : IndexUtilizationInfo.Empty; +class TriggerResponse extends ResourceResponse { + constructor(resource, headers, statusCode, trigger, diagnostics) { + super(resource, headers, statusCode, diagnostics); + this.trigger = trigger; } } -IndexUtilizationInfo.Empty = new IndexUtilizationInfo([], [], [], []); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -var Constants = { - IndexUtilizationInfo: "Index Utilization Information", - UtilizedSingleIndexes: "Utilized Single Indexes", - PotentialSingleIndexes: "Potential Single Indexes", - UtilizedCompositeIndexes: "Utilized Composite Indexes", - PotentialCompositeIndexes: "Potential Composite Indexes", - IndexExpression: "Index Spec", - IndexImpactScore: "Index Impact Score", - IndexUtilizationSeparator: "---", -}; -// Copyright (c) Microsoft Corporation. 
-class IndexMetricWriter { - writeIndexMetrics(indexUtilizationInfo) { - let result = ""; - result = this.writeBeforeIndexUtilizationInfo(result); - result = this.writeIndexUtilizationInfo(result, indexUtilizationInfo); - result = this.writeAfterIndexUtilizationInfo(result); - return result; - } - writeBeforeIndexUtilizationInfo(result) { - result = this.appendNewlineToResult(result); - result = this.appendHeaderToResult(result, Constants.IndexUtilizationInfo, 0); - return result; - } - writeIndexUtilizationInfo(result, indexUtilizationInfo) { - result = this.appendHeaderToResult(result, Constants.UtilizedSingleIndexes, 1); - for (const indexUtilizationEntity of indexUtilizationInfo.UtilizedSingleIndexes) { - result = this.writeSingleIndexUtilizationEntity(result, indexUtilizationEntity); - } - result = this.appendHeaderToResult(result, Constants.PotentialSingleIndexes, 1); - for (const indexUtilizationEntity of indexUtilizationInfo.PotentialSingleIndexes) { - result = this.writeSingleIndexUtilizationEntity(result, indexUtilizationEntity); - } - result = this.appendHeaderToResult(result, Constants.UtilizedCompositeIndexes, 1); - for (const indexUtilizationEntity of indexUtilizationInfo.UtilizedCompositeIndexes) { - result = this.writeCompositeIndexUtilizationEntity(result, indexUtilizationEntity); - } - result = this.appendHeaderToResult(result, Constants.PotentialCompositeIndexes, 1); - for (const indexUtilizationEntity of indexUtilizationInfo.PotentialCompositeIndexes) { - result = this.writeCompositeIndexUtilizationEntity(result, indexUtilizationEntity); - } - return result; - } - writeAfterIndexUtilizationInfo(result) { - return result; +/** + * Operations to read, replace, or delete a {@link Trigger}. + * + * Use `container.triggers` to create, upsert, query, or read all. + */ +class Trigger { + /** + * Returns a reference URL to the resource. Used for linking in Permissions. + */ + get url() { + return createTriggerUri(this.container.database.id, this.container.id, this.id); } - writeSingleIndexUtilizationEntity(result, indexUtilizationEntity) { - result = this.appendHeaderToResult(result, `${Constants.IndexExpression}: ${indexUtilizationEntity.IndexSpec}`, 2); - result = this.appendHeaderToResult(result, `${Constants.IndexImpactScore}: ${indexUtilizationEntity.IndexImpactScore}`, 2); - result = this.appendHeaderToResult(result, Constants.IndexUtilizationSeparator, 2); - return result; + /** + * @hidden + * @param container - The parent {@link Container}. + * @param id - The id of the given {@link Trigger}. + */ + constructor(container, id, clientContext) { + this.container = container; + this.id = id; + this.clientContext = clientContext; } - writeCompositeIndexUtilizationEntity(result, indexUtilizationEntity) { - result = this.appendHeaderToResult(result, `${Constants.IndexExpression}: ${indexUtilizationEntity.IndexSpecs.join(", ")}`, 2); - result = this.appendHeaderToResult(result, `${Constants.IndexImpactScore}: ${indexUtilizationEntity.IndexImpactScore}`, 2); - result = this.appendHeaderToResult(result, Constants.IndexUtilizationSeparator, 2); - return result; + /** + * Read the {@link TriggerDefinition} for the given {@link Trigger}. 
+ */ + async read(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.read({ + path, + resourceType: exports.ResourceType.trigger, + resourceId: id, + options, + diagnosticNode, + }); + return new TriggerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } - appendNewlineToResult(result) { - return this.appendHeaderToResult(result, "", 0); + /** + * Replace the given {@link Trigger} with the specified {@link TriggerDefinition}. + * @param body - The specified {@link TriggerDefinition} to replace the existing definition with. + */ + async replace(body, options) { + return withDiagnostics(async (diagnosticNode) => { + if (body.body) { + body.body = body.body.toString(); + } + const err = {}; + if (!isResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.replace({ + body, + path, + resourceType: exports.ResourceType.trigger, + resourceId: id, + options, + diagnosticNode, + }); + return new TriggerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } - appendHeaderToResult(result, headerTitle, indentLevel) { - const Indent = " "; - const header = `${Indent.repeat(indentLevel)}${headerTitle}\n`; - result += header; - return result; + /** + * Delete the given {@link Trigger}. + */ + async delete(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.trigger, + resourceId: id, + options, + diagnosticNode, + }); + return new TriggerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } } -// Copyright (c) Microsoft Corporation. -class FeedResponse { - constructor(resources, headers, hasMoreResults, diagnostics) { - this.resources = resources; - this.headers = headers; - this.hasMoreResults = hasMoreResults; - this.diagnostics = diagnostics; - } - get continuation() { - return this.continuationToken; - } - get continuationToken() { - return this.headers[Constants$1.HttpHeaders.Continuation]; - } - get queryMetrics() { - return this.headers[Constants$1.HttpHeaders.QueryMetrics]; +/** + * Operations to create, upsert, query, and read all triggers. + * + * Use `container.triggers` to read, replace, or delete a {@link Trigger}. + */ +class Triggers { + /** + * @hidden + * @param container - The parent {@link Container}. 
+ */ + constructor(container, clientContext) { + this.container = container; + this.clientContext = clientContext; } - get requestCharge() { - return getRequestChargeIfAny(this.headers); + query(query, options) { + const path = getPathFromLink(this.container.url, exports.ResourceType.trigger); + const id = getIdFromLink(this.container.url); + return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { + return this.clientContext.queryFeed({ + path, + resourceType: exports.ResourceType.trigger, + resourceId: id, + resultFn: (result) => result.Triggers, + query, + options: innerOptions, + diagnosticNode, + }); + }); } - get activityId() { - return this.headers[Constants$1.HttpHeaders.ActivityId]; + /** + * Read all Triggers. + * @example Read all trigger to array. + * ```typescript + * const {body: triggerList} = await container.triggers.readAll().fetchAll(); + * ``` + */ + readAll(options) { + return this.query(undefined, options); } - get indexMetrics() { - const writer = new IndexMetricWriter(); - const indexUtilizationInfo = IndexUtilizationInfo.createFromString(this.headers[Constants$1.HttpHeaders.IndexUtilization], true); - return writer.writeIndexMetrics(indexUtilizationInfo); + /** + * Create a trigger. + * + * Azure Cosmos DB supports pre and post triggers defined in JavaScript to be executed + * on creates, updates and deletes. + * + * For additional details, refer to the server-side JavaScript API documentation. + */ + async create(body, options) { + return withDiagnostics(async (diagnosticNode) => { + if (body.body) { + body.body = body.body.toString(); + } + const err = {}; + if (!isResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.container.url, exports.ResourceType.trigger); + const id = getIdFromLink(this.container.url); + const response = await this.clientContext.create({ + body, + path, + resourceType: exports.ResourceType.trigger, + resourceId: id, + options, + diagnosticNode, + }); + const ref = new Trigger(this.container, response.result.id, this.clientContext); + return new TriggerResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); + }, this.clientContext); } } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * @hidden - */ -const TimeoutErrorCode = "TimeoutError"; -class TimeoutError extends Error { - constructor(message = "Timeout Error") { - super(message); - this.code = TimeoutErrorCode; - this.name = TimeoutErrorCode; +class UserDefinedFunctionResponse extends ResourceResponse { + constructor(resource, headers, statusCode, udf, diagnostics) { + super(resource, headers, statusCode, diagnostics); + this.userDefinedFunction = udf; + } + /** + * Alias for `userDefinedFunction(id)`. + * + * A reference to the {@link UserDefinedFunction} corresponding to the returned {@link UserDefinedFunctionDefinition}. + */ + get udf() { + return this.userDefinedFunction; } } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * @hidden - * Utility function to get currentTime in UTC milliseconds. - * @returns - */ -function getCurrentTimestampInMs() { - return Date.now(); -} - -// Copyright (c) Microsoft Corporation. /** - * @hidden - * Internal class to hold CosmosDiagnostic aggregate information all through the lifecycle of a request. - * This object gathers diagnostic information throughout Client operation which may span across multiple - * Server call, retries etc. 
- * Functions - recordFailedAttempt, recordMetaDataQuery, recordEndpointContactEvent are used to ingest - * data into the context. At the end of operation, getDiagnostics() is used to - * get final CosmosDiagnostic object. + * Used to read, replace, or delete a specified User Definied Function by id. + * + * @see {@link UserDefinedFunction} to create, upsert, query, read all User Defined Functions. */ -class CosmosDiagnosticContext { - constructor() { - this.failedAttempts = []; - this.metadataLookups = []; - this.gaterwayStatistics = []; - this.locationEndpointsContacted = new Set(); - this.requestStartTimeUTCinMs = getCurrentTimestampInMs(); - } - recordFailedAttempt(gaterwayStatistics, retryAttemptNumber) { - const attempt = { - attemptNumber: retryAttemptNumber, - startTimeUTCInMs: gaterwayStatistics.startTimeUTCInMs, - durationInMs: gaterwayStatistics.durationInMs, - statusCode: gaterwayStatistics.statusCode, - substatusCode: gaterwayStatistics.subStatusCode, - requestPayloadLengthInBytes: gaterwayStatistics.requestPayloadLengthInBytes, - responsePayloadLengthInBytes: gaterwayStatistics.responsePayloadLengthInBytes, - activityId: gaterwayStatistics.activityId, - operationType: gaterwayStatistics.operationType, - resourceType: gaterwayStatistics.resourceType, - }; - this.failedAttempts.push(attempt); - } - recordNetworkCall(gaterwayStatistics) { - this.gaterwayStatistics.push(gaterwayStatistics); +class UserDefinedFunction { + /** + * Returns a reference URL to the resource. Used for linking in Permissions. + */ + get url() { + return createUserDefinedFunctionUri(this.container.database.id, this.container.id, this.id); } /** - * Merge given DiagnosticContext to current node's DiagnosticContext, Treating GatewayRequests of - * given DiagnosticContext, as metadata requests. + * @hidden + * @param container - The parent {@link Container}. + * @param id - The id of the given {@link UserDefinedFunction}. */ - mergeDiagnostics(childDiagnostics, metadataType) { - // Copy Location endpoints contacted. - childDiagnostics.locationEndpointsContacted.forEach((endpoint) => this.locationEndpointsContacted.add(endpoint)); - // Copy child nodes's GatewayStatistics to parent's metadata lookups. - childDiagnostics.gaterwayStatistics.forEach((gateway) => this.metadataLookups.push({ - activityId: gateway.activityId, - requestPayloadLengthInBytes: gateway.requestPayloadLengthInBytes, - responsePayloadLengthInBytes: gateway.responsePayloadLengthInBytes, - startTimeUTCInMs: gateway.startTimeUTCInMs, - operationType: gateway.operationType, - resourceType: gateway.resourceType, - durationInMs: gateway.durationInMs, - metaDataType: metadataType, - })); - // Copy child nodes's metadata lookups to parent's metadata lookups. - childDiagnostics.metadataLookups.forEach((lookup) => this.metadataLookups.push(lookup)); - // Copy child nodes's failed attempts to parent's failed attempts. 
- childDiagnostics.failedAttempts.forEach((lookup) => this.failedAttempts.push(lookup)); - } - getClientSideStats(endTimeUTCInMs = getCurrentTimestampInMs()) { - return { - requestStartTimeUTCInMs: this.requestStartTimeUTCinMs, - requestDurationInMs: endTimeUTCInMs - this.requestStartTimeUTCinMs, - totalRequestPayloadLengthInBytes: this.getTotalRequestPayloadLength(), - totalResponsePayloadLengthInBytes: this.getTotalResponsePayloadLength(), - locationEndpointsContacted: [...this.locationEndpointsContacted.values()], - metadataDiagnostics: { - metadataLookups: [...this.metadataLookups], - }, - retryDiagnostics: { - failedAttempts: [...this.failedAttempts], - }, - gatewayStatistics: this.gaterwayStatistics, - }; + constructor(container, id, clientContext) { + this.container = container; + this.id = id; + this.clientContext = clientContext; } - getTotalRequestPayloadLength() { - let totalRequestPayloadLength = 0; - this.gaterwayStatistics.forEach((req) => (totalRequestPayloadLength += req.requestPayloadLengthInBytes)); - this.metadataLookups.forEach((req) => (totalRequestPayloadLength += req.requestPayloadLengthInBytes)); - this.failedAttempts.forEach((req) => (totalRequestPayloadLength += req.requestPayloadLengthInBytes)); - return totalRequestPayloadLength; + /** + * Read the {@link UserDefinedFunctionDefinition} for the given {@link UserDefinedFunction}. + */ + async read(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.read({ + path, + resourceType: exports.ResourceType.udf, + resourceId: id, + options, + diagnosticNode, + }); + return new UserDefinedFunctionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } - getTotalResponsePayloadLength() { - let totalResponsePayloadLength = 0; - this.gaterwayStatistics.forEach((req) => (totalResponsePayloadLength += req.responsePayloadLengthInBytes)); - this.metadataLookups.forEach((req) => (totalResponsePayloadLength += req.responsePayloadLengthInBytes)); - this.failedAttempts.forEach((req) => (totalResponsePayloadLength += req.responsePayloadLengthInBytes)); - return totalResponsePayloadLength; + /** + * Replace the given {@link UserDefinedFunction} with the specified {@link UserDefinedFunctionDefinition}. + * @param options - + */ + async replace(body, options) { + return withDiagnostics(async (diagnosticNode) => { + if (body.body) { + body.body = body.body.toString(); + } + const err = {}; + if (!isResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.replace({ + body, + path, + resourceType: exports.ResourceType.udf, + resourceId: id, + options, + diagnosticNode, + }); + return new UserDefinedFunctionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } - recordEndpointResolution(location) { - this.locationEndpointsContacted.add(location); + /** + * Delete the given {@link UserDefined}. 
+ */ + async delete(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.udf, + resourceId: id, + options, + diagnosticNode, + }); + return new UserDefinedFunctionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * * This is a Cosmos Diagnostic type that holds collected diagnostic information during a client operations. ie. Item.read(), Container.create(). - * It has three members - - * 1. `clientSideRequestStatistics` member contains aggregate diagnostic information, including - - * - metadata lookups. Here all the server requests, apart from the final intended resource are considered as metadata calls. - * i.e. for item.read(id), if the client makes server call to discover endpoints it would be considered as metadata call. - * - retries - * - endpoints contacted. - * - request, response payload stats. - * - gatewayStatistics - Information corresponding to main operation. For example during Item.read(), the client might perform many operations - * i.e. metadata lookup etc, but gatewayStatistics represents the diagnostics information for actual read operation. + * Used to create, upsert, query, or read all User Defined Functions. * - * 2. diagnosticNode - Is a tree like structure which captures detailed diagnostic information. By default it is disabled, and is intended to be - * used only for debugging on non production environments. The kind of details captured in diagnosticNode is controlled by `CosmosDbDiagnosticLevel`. - * - CosmosDbDiagnosticLevel.info - Is default value. In this level only clientSideRequestStatistics are captured. Is is meant for production environments. - * - CosmosDbDiagnosticLevel.debug - Captures diagnosticNode and clientConfig. No request and response payloads are captured. Is not meant to be used - * in production environment. - * - CosmosDbDiagnosticLevel.debug-unsafe - In addition to data captured in CosmosDbDiagnosticLevel.debug, also captures request and response payloads. - * Is not meant to be used in production environment. - * 3. clientConfig - Captures information related to how client was configured during initialization. + * @see {@link UserDefinedFunction} to read, replace, or delete a given User Defined Function by id. */ -class CosmosDiagnostics { +class UserDefinedFunctions { /** - * @internal + * @hidden + * @param container - The parent {@link Container}. */ - constructor(clientSideRequestStatistics, diagnosticNode, clientConfig) { - this.clientSideRequestStatistics = clientSideRequestStatistics; - this.diagnosticNode = diagnosticNode; - this.clientConfig = clientConfig; + constructor(container, clientContext) { + this.container = container; + this.clientContext = clientContext; } -} -/** - * This is enum for Type of Metadata lookups possible. 
- */ -exports.MetadataLookUpType = void 0; -(function (MetadataLookUpType) { - MetadataLookUpType["PartitionKeyRangeLookUp"] = "PARTITION_KEY_RANGE_LOOK_UP"; - MetadataLookUpType["DatabaseAccountLookUp"] = "DATABASE_ACCOUNT_LOOK_UP"; - MetadataLookUpType["QueryPlanLookUp"] = "QUERY_PLAN_LOOK_UP"; - MetadataLookUpType["DatabaseLookUp"] = "DATABASE_LOOK_UP"; - MetadataLookUpType["ContainerLookUp"] = "CONTAINER_LOOK_UP"; -})(exports.MetadataLookUpType || (exports.MetadataLookUpType = {})); -function getRootNode(node) { - if (node.parent) - return getRootNode(node.parent); - else - return node; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Cosmos DB Diagnostic Level - */ -exports.CosmosDbDiagnosticLevel = void 0; -(function (CosmosDbDiagnosticLevel) { - CosmosDbDiagnosticLevel["info"] = "info"; - CosmosDbDiagnosticLevel["debug"] = "debug"; - CosmosDbDiagnosticLevel["debugUnsafe"] = "debug-unsafe"; -})(exports.CosmosDbDiagnosticLevel || (exports.CosmosDbDiagnosticLevel = {})); - -// Copyright (c) Microsoft Corporation. -/** - * @hidden - */ -const CosmosDbDiagnosticLevelOrder = [ - exports.CosmosDbDiagnosticLevel.info, - exports.CosmosDbDiagnosticLevel.debug, - exports.CosmosDbDiagnosticLevel.debugUnsafe, -]; -/** - * @hidden - */ -function allowTracing(levelToCheck, clientDiagnosticLevel) { - const indexOfDiagnosticLevelToCheck = CosmosDbDiagnosticLevelOrder.indexOf(levelToCheck); - const indexOfClientDiagnosticLevel = CosmosDbDiagnosticLevelOrder.indexOf(clientDiagnosticLevel); - if (indexOfDiagnosticLevelToCheck === -1 || indexOfClientDiagnosticLevel === -1) { - return false; + query(query, options) { + const path = getPathFromLink(this.container.url, exports.ResourceType.udf); + const id = getIdFromLink(this.container.url); + return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { + return this.clientContext.queryFeed({ + path, + resourceType: exports.ResourceType.udf, + resourceId: id, + resultFn: (result) => result.UserDefinedFunctions, + query, + options: innerOptions, + diagnosticNode, + }); + }); } - return indexOfDiagnosticLevelToCheck <= indexOfClientDiagnosticLevel; -} - -// Copyright (c) Microsoft Corporation. -/** - * @hidden - * This is Internal Representation for DiagnosticNode. It contains useful helper functions to collect - * diagnostic information throughout the lifetime of Diagnostic session. - * The functions toDiagnosticNode() & toDiagnostic() are given to convert it to public facing counterpart. - */ -class DiagnosticNodeInternal { /** - * @internal + * Read all User Defined Functions. + * @example Read all User Defined Functions to array. + * ```typescript + * const {body: udfList} = await container.userDefinedFunctions.readAll().fetchAll(); + * ``` */ - constructor(diagnosticLevel, type, parent, data = {}, startTimeUTCInMs = getCurrentTimestampInMs(), ctx = new CosmosDiagnosticContext()) { - this.id = uuid$3.v4(); - this.nodeType = type; - this.startTimeUTCInMs = startTimeUTCInMs; - this.data = data; - this.children = []; - this.durationInMs = 0; - this.parent = parent; - this.diagnosticCtx = ctx; - this.diagnosticLevel = diagnosticLevel; + readAll(options) { + return this.query(undefined, options); } /** - * @internal + * Create a UserDefinedFunction. + * + * Azure Cosmos DB supports JavaScript UDFs which can be used inside queries, stored procedures and triggers. + * + * For additional details, refer to the server-side JavaScript API documentation. 
+ * */ - addLog(msg) { - if (!this.data.log) { - this.data.log = []; - } - this.data.log.push(msg); + async create(body, options) { + return withDiagnostics(async (diagnosticNode) => { + if (body.body) { + body.body = body.body.toString(); + } + const err = {}; + if (!isResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.container.url, exports.ResourceType.udf); + const id = getIdFromLink(this.container.url); + const response = await this.clientContext.create({ + body, + path, + resourceType: exports.ResourceType.udf, + resourceId: id, + options, + diagnosticNode, + }); + const ref = new UserDefinedFunction(this.container, response.result.id, this.clientContext); + return new UserDefinedFunctionResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); + }, this.clientContext); } +} + +// Copyright (c) Microsoft Corporation. +class Scripts { /** - * @internal + * @param container - The parent {@link Container}. + * @hidden */ - sanitizeHeaders(headers) { - return headers; + constructor(container, clientContext) { + this.container = container; + this.clientContext = clientContext; } /** - * Updated durationInMs for node, based on endTimeUTCInMs provided. - * @internal + * Used to read, replace, or delete a specific, existing {@link StoredProcedure} by id. + * + * Use `.storedProcedures` for creating new stored procedures, or querying/reading all stored procedures. + * @param id - The id of the {@link StoredProcedure}. */ - updateTimestamp(endTimeUTCInMs = getCurrentTimestampInMs()) { - this.durationInMs = endTimeUTCInMs - this.startTimeUTCInMs; + storedProcedure(id) { + return new StoredProcedure(this.container, id, this.clientContext); } /** - * @internal + * Used to read, replace, or delete a specific, existing {@link Trigger} by id. + * + * Use `.triggers` for creating new triggers, or querying/reading all triggers. + * @param id - The id of the {@link Trigger}. 
*/ - recordSuccessfulNetworkCall(startTimeUTCInMs, requestContext, pipelineResponse, substatus, url) { - const responseHeaders = pipelineResponse.headers.toJSON(); - const gatewayRequest = { - activityId: responseHeaders[Constants$1.HttpHeaders.ActivityId], - startTimeUTCInMs, - durationInMs: getCurrentTimestampInMs() - startTimeUTCInMs, - statusCode: pipelineResponse.status, - subStatusCode: substatus, - requestPayloadLengthInBytes: calculateRequestPayloadLength(requestContext), - responsePayloadLengthInBytes: calculateResponsePayloadLength(pipelineResponse), - operationType: requestContext.operationType, - resourceType: requestContext.resourceType, - partitionKeyRangeId: requestContext.partitionKeyRangeId, - }; - let requestData = { - OperationType: gatewayRequest.operationType, - resourceType: gatewayRequest.resourceType, - requestPayloadLengthInBytes: gatewayRequest.requestPayloadLengthInBytes, - }; - if (allowTracing(exports.CosmosDbDiagnosticLevel.debugUnsafe, this.diagnosticLevel)) { - requestData = Object.assign(Object.assign({}, requestData), { headers: this.sanitizeHeaders(requestContext.headers), requestBody: requestContext.body, responseBody: pipelineResponse.bodyAsText, url: url }); - } - this.addData({ - requestPayloadLengthInBytes: gatewayRequest.requestPayloadLengthInBytes, - responsePayloadLengthInBytes: gatewayRequest.responsePayloadLengthInBytes, - startTimeUTCInMs: gatewayRequest.startTimeUTCInMs, - durationInMs: gatewayRequest.durationInMs, - requestData, - }); - this.diagnosticCtx.recordNetworkCall(gatewayRequest); + trigger(id) { + return new Trigger(this.container, id, this.clientContext); } /** - * @internal + * Used to read, replace, or delete a specific, existing {@link UserDefinedFunction} by id. + * + * Use `.userDefinedFunctions` for creating new user defined functions, or querying/reading all user defined functions. + * @param id - The id of the {@link UserDefinedFunction}. */ - recordFailedNetworkCall(startTimeUTCInMs, requestContext, retryAttemptNumber, statusCode, substatusCode, responseHeaders) { - this.addData({ failedAttempty: true }); - const requestPayloadLengthInBytes = calculateRequestPayloadLength(requestContext); - this.diagnosticCtx.recordFailedAttempt({ - activityId: responseHeaders[Constants$1.HttpHeaders.ActivityId], - startTimeUTCInMs, - durationInMs: getCurrentTimestampInMs() - startTimeUTCInMs, - statusCode, - subStatusCode: substatusCode, - requestPayloadLengthInBytes, - responsePayloadLengthInBytes: 0, - operationType: requestContext.operationType, - resourceType: requestContext.resourceType, - }, retryAttemptNumber); - let requestData = { - OperationType: requestContext.operationType, - resourceType: requestContext.resourceType, - requestPayloadLengthInBytes, - }; - if (allowTracing(exports.CosmosDbDiagnosticLevel.debugUnsafe, this.diagnosticLevel)) { - requestData = Object.assign(Object.assign({}, requestData), { headers: this.sanitizeHeaders(requestContext.headers), requestBody: requestContext.body, url: prepareURL(requestContext.endpoint, requestContext.path) }); - } - this.addData({ - failedAttempty: true, - requestData, - }); + userDefinedFunction(id) { + return new UserDefinedFunction(this.container, id, this.clientContext); } /** - * @internal + * Operations for creating new stored procedures, and reading/querying all stored procedures. + * + * For reading, replacing, or deleting an existing stored procedure, use `.storedProcedure(id)`. 
*/ - recordEndpointResolution(location) { - this.addData({ selectedLocation: location }); - this.diagnosticCtx.recordEndpointResolution(location); + get storedProcedures() { + if (!this.$sprocs) { + this.$sprocs = new StoredProcedures(this.container, this.clientContext); + } + return this.$sprocs; } /** - * @internal + * Operations for creating new triggers, and reading/querying all triggers. + * + * For reading, replacing, or deleting an existing trigger, use `.trigger(id)`. */ - addData(data, msg, level = this.diagnosticLevel) { - if (level !== exports.CosmosDbDiagnosticLevel.info) { - this.data = Object.assign(Object.assign({}, this.data), data); - if (msg) { - this.addLog(msg); - } + get triggers() { + if (!this.$triggers) { + this.$triggers = new Triggers(this.container, this.clientContext); } + return this.$triggers; } /** - * Merge given DiagnosticNodeInternal's context to current node's DiagnosticContext, Treating GatewayRequests of - * given DiagnosticContext, as metadata requests. Given DiagnosticNodeInternal becomes a child of this node. - * @internal + * Operations for creating new user defined functions, and reading/querying all user defined functions. + * + * For reading, replacing, or deleting an existing user defined function, use `.userDefinedFunction(id)`. */ - addChildNode(child, level, metadataType) { - this.diagnosticCtx.mergeDiagnostics(child.diagnosticCtx, metadataType); - if (allowTracing(level, this.diagnosticLevel)) { - child.parent = this; - this.children.push(child); + get userDefinedFunctions() { + if (!this.$udfs) { + this.$udfs = new UserDefinedFunctions(this.container, this.clientContext); } - return child; + return this.$udfs; + } +} + +/** Response object for Container operations */ +class ContainerResponse extends ResourceResponse { + constructor(resource, headers, statusCode, container, diagnostics) { + super(resource, headers, statusCode, diagnostics); + this.container = container; + } +} + +class OfferResponse extends ResourceResponse { + constructor(resource, headers, statusCode, diagnostics, offer) { + super(resource, headers, statusCode, diagnostics); + this.offer = offer; } +} + +/** + * Use to read or replace an existing {@link Offer} by id. + * + * @see {@link Offers} to query or read all offers. + */ +class Offer { /** - * @internal + * Returns a reference URL to the resource. Used for linking in Permissions. */ - initializeChildNode(type, level, data = {}) { - if (allowTracing(level, this.diagnosticLevel)) { - const child = new DiagnosticNodeInternal(this.diagnosticLevel, type, this, data, getCurrentTimestampInMs(), this.diagnosticCtx); - this.children.push(child); - return child; - } - else { - return this; - } + get url() { + return `/${Constants$1.Path.OffersPathSegment}/${this.id}`; } /** - * @internal + * @hidden + * @param client - The parent {@link CosmosClient} for the Database Account. + * @param id - The id of the given {@link Offer}. */ - recordQueryResult(resources, level) { - var _a; - if (allowTracing(level, this.diagnosticLevel)) { - const previousCount = (_a = this.data.queryRecordsRead) !== null && _a !== void 0 ? 
_a : 0; - if (Array.isArray(resources)) { - this.data.queryRecordsRead = previousCount + resources.length; - } - } + constructor(client, id, clientContext) { + this.client = client; + this.id = id; + this.clientContext = clientContext; } /** - * Convert DiagnosticNodeInternal (internal representation) to DiagnosticNode (public, sanitized representation) - * @internal + * Read the {@link OfferDefinition} for the given {@link Offer}. */ - toDiagnosticNode() { - return { - id: this.id, - nodeType: this.nodeType, - children: this.children.map((child) => child.toDiagnosticNode()), - data: this.data, - startTimeUTCInMs: this.startTimeUTCInMs, - durationInMs: this.durationInMs, - }; + async read(options) { + return withDiagnostics(async (diagnosticNode) => { + const response = await this.clientContext.read({ + path: this.url, + resourceType: exports.ResourceType.offer, + resourceId: this.id, + options, + diagnosticNode, + }); + return new OfferResponse(response.result, response.headers, response.code, getEmptyCosmosDiagnostics(), this); + }, this.clientContext); } /** - * Convert to CosmosDiagnostics - * @internal + * Replace the given {@link Offer} with the specified {@link OfferDefinition}. + * @param body - The specified {@link OfferDefinition} */ - toDiagnostic(clientConfigDiagnostic) { - const rootNode = getRootNode(this); - const diagnostiNode = allowTracing(exports.CosmosDbDiagnosticLevel.debug, this.diagnosticLevel) - ? rootNode.toDiagnosticNode() - : undefined; - const clientConfig = allowTracing(exports.CosmosDbDiagnosticLevel.debug, this.diagnosticLevel) - ? clientConfigDiagnostic - : undefined; - const cosmosDiagnostic = new CosmosDiagnostics(this.diagnosticCtx.getClientSideStats(), diagnostiNode, clientConfig); - return cosmosDiagnostic; + async replace(body, options) { + return withDiagnostics(async (diagnosticNode) => { + const err = {}; + if (!isResourceValid(body, err)) { + throw err; + } + const response = await this.clientContext.replace({ + body, + path: this.url, + resourceType: exports.ResourceType.offer, + resourceId: this.id, + options, + diagnosticNode, + }); + return new OfferResponse(response.result, response.headers, response.code, getEmptyCosmosDiagnostics(), this); + }, this.clientContext); } } -/** - * @hidden - */ -exports.DiagnosticNodeType = void 0; -(function (DiagnosticNodeType) { - DiagnosticNodeType["CLIENT_REQUEST_NODE"] = "CLIENT_REQUEST_NODE"; - DiagnosticNodeType["METADATA_REQUEST_NODE"] = "METADATA_REQUEST_NODE"; - DiagnosticNodeType["HTTP_REQUEST"] = "HTTP_REQUEST"; - DiagnosticNodeType["BATCH_REQUEST"] = "BATCH_REQUEST"; - DiagnosticNodeType["PARALLEL_QUERY_NODE"] = "PARALLEL_QUERY_NODE"; - DiagnosticNodeType["DEFAULT_QUERY_NODE"] = "DEFAULT_QUERY_NODE"; - DiagnosticNodeType["QUERY_REPAIR_NODE"] = "QUERY_REPAIR_NODE"; - DiagnosticNodeType["BACKGROUND_REFRESH_THREAD"] = "BACKGROUND_REFRESH_THREAD"; - DiagnosticNodeType["REQUEST_ATTEMPTS"] = "REQUEST_ATTEMPTS"; -})(exports.DiagnosticNodeType || (exports.DiagnosticNodeType = {})); -function calculateResponsePayloadLength(response) { - var _a; - return ((_a = response === null || response === void 0 ? void 0 : response.bodyAsText) === null || _a === void 0 ? void 0 : _a.length) || 0; -} -function calculateRequestPayloadLength(requestContext) { - return requestContext.body ? requestContext.body.length : 0; -} -// Copyright (c) Microsoft Corporation. -/** - * @hidden - * Utility function to create an Empty CosmosDiagnostic object. 
- */ -function getEmptyCosmosDiagnostics() { - return new CosmosDiagnostics({ - requestDurationInMs: 0, - requestStartTimeUTCInMs: getCurrentTimestampInMs(), - totalRequestPayloadLengthInBytes: 0, - totalResponsePayloadLengthInBytes: 0, - locationEndpointsContacted: [], - retryDiagnostics: { - failedAttempts: [], - }, - metadataDiagnostics: { - metadataLookups: [], - }, - gatewayStatistics: [], - }, { - id: uuid$3.v4(), - nodeType: exports.DiagnosticNodeType.CLIENT_REQUEST_NODE, - children: [], - data: {}, - startTimeUTCInMs: getCurrentTimestampInMs(), - durationInMs: 0, - }); -} /** - * A supporting utility wrapper function, to be used inside a diagnostic session started - * by `withDiagnostics` function. - * Created a Diagnostic node and add it as a child to existing diagnostic session. - * @hidden + * Use to query or read all Offers. + * + * @see {@link Offer} to read or replace an existing {@link Offer} by id. */ -async function addDignosticChild(callback, node, type, data = {}) { - const childNode = node.initializeChildNode(type, exports.CosmosDbDiagnosticLevel.debug, data); - try { - const response = await callback(childNode); - childNode.updateTimestamp(); - return response; +class Offers { + /** + * @hidden + * @param client - The parent {@link CosmosClient} for the offers. + */ + constructor(client, clientContext) { + this.client = client; + this.clientContext = clientContext; } - catch (e) { - childNode.addData({ - failure: true, + query(query, options) { + return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { + return this.clientContext.queryFeed({ + path: "/offers", + resourceType: exports.ResourceType.offer, + resourceId: "", + resultFn: (result) => result.Offers, + query, + options: innerOptions, + diagnosticNode, + }); }); - childNode.updateTimestamp(); - throw e; - } -} -/** - * A supporting utility wrapper function, to be used inside a diagnostic session started - * by `withDiagnostics` function. - * Treats requests originating in provided `callback` as metadata calls. - * To realize this, starts a temporary diagnostic session, after execution of callback is - * finished. Merges this temporary diagnostic session to the original diagnostic session - * represented by the input parameter `node`. - * @hidden - */ -async function withMetadataDiagnostics(callback, node, type) { - const diagnosticNodeForMetadataCall = new DiagnosticNodeInternal(node.diagnosticLevel, exports.DiagnosticNodeType.METADATA_REQUEST_NODE, null); - try { - const response = await callback(diagnosticNodeForMetadataCall); - node.addChildNode(diagnosticNodeForMetadataCall, exports.CosmosDbDiagnosticLevel.debug, type); - return response; } - catch (e) { - node.addChildNode(diagnosticNodeForMetadataCall, exports.CosmosDbDiagnosticLevel.debug, type); - throw e; + /** + * Read all offers. + * @example Read all offers to array. + * ```typescript + * const {body: offerList} = await client.offers.readAll().fetchAll(); + * ``` + */ + readAll(options) { + return this.query(undefined, options); } } + /** - * Utility wrapper function to managed lifecycle of a Diagnostic session. - * Meant to be used at the root of the client operation. i.e. item.read(), - * queryIterator.fetchAll(). - * - * This utility starts a new diagnostic session. So using it any where else - * other than start of operation, will result is different diagnostic sessions. + * Operations for reading, replacing, or deleting a specific, existing container by id. * - * Workings : - * 1. 
Takes a callback function as input. - * 2. Creates a new instance of DiagnosticNodeInternal, which can be though as starting - * a new diagnostic session. - * 3. Executes the callback function. - * 4. If execution was successful. Converts DiagnosticNodeInternal to CosmosDiagnostics - * and injects it to the response object and returns this object. - * 5. If execution threw an exception. Sill converts DiagnosticNodeInternal to CosmosDiagnostics - * and injects it to the Error object, and rethrows the Error object. + * @see {@link Containers} for creating new containers, and reading/querying all containers; use `.containers`. * - * @hidden + * Note: all these operations make calls against a fixed budget. + * You should design your system such that these calls scale sublinearly with your application. + * For instance, do not call `container(id).read()` before every single `item.read()` call, to ensure the container exists; + * do this once on application start up. */ -async function withDiagnostics(callback, clientContext, type = exports.DiagnosticNodeType.CLIENT_REQUEST_NODE) { - const diagnosticNode = new DiagnosticNodeInternal(clientContext.diagnosticLevel, type, null); - try { - const response = await callback(diagnosticNode); - diagnosticNode.updateTimestamp(); - const diagnostics = diagnosticNode.toDiagnostic(clientContext.getClientConfig()); - if (typeof response === "object" && response !== null) { - response.diagnostics = diagnostics; +class Container { + /** + * Operations for creating new items, and reading/querying all items + * + * For reading, replacing, or deleting an existing item, use `.item(id)`. + * + * @example Create a new item + * ```typescript + * const {body: createdItem} = await container.items.create({id: "", properties: {}}); + * ``` + */ + get items() { + if (!this.$items) { + this.$items = new Items(this, this.clientContext); } - clientContext.recordDiagnostics(diagnostics); - return response; - } - catch (e) { - diagnosticNode.updateTimestamp(); - diagnosticNode.addData({ - failure: true, - }); - const diagnostics = diagnosticNode.toDiagnostic(clientContext.getClientConfig()); - e.diagnostics = diagnostics; - clientContext.recordDiagnostics(diagnostics); - throw e; + return this.$items; } -} - -// Copyright (c) Microsoft Corporation. -const logger$3 = logger$5.createClientLogger("ClientContext"); -/** @hidden */ -var STATES; -(function (STATES) { - STATES["start"] = "start"; - STATES["inProgress"] = "inProgress"; - STATES["ended"] = "ended"; -})(STATES || (STATES = {})); -/** @hidden */ -class DefaultQueryExecutionContext { - get continuation() { - return this.continuationToken; + /** + * All operations for Stored Procedures, Triggers, and User Defined Functions + */ + get scripts() { + if (!this.$scripts) { + this.$scripts = new Scripts(this, this.clientContext); + } + return this.$scripts; } /** - * Provides the basic Query Execution Context. - * This wraps the internal logic query execution using provided fetch functions + * Operations for reading and querying conflicts for the given container. * - * @param clientContext - Is used to read the partitionKeyRanges for split proofing - * @param query - A SQL query. - * @param options - Represents the feed options. - * @param fetchFunctions - A function to retrieve each page of data. - * An array of functions may be used to query more than one partition. - * @hidden + * For reading or deleting a specific conflict, use `.conflict(id)`. 
*/ - constructor(options, fetchFunctions) { - this.resources = []; - this.currentIndex = 0; - this.currentPartitionIndex = 0; - this.fetchFunctions = Array.isArray(fetchFunctions) ? fetchFunctions : [fetchFunctions]; - this.options = options || {}; - this.continuationToken = this.options.continuationToken || this.options.continuation || null; - this.state = DefaultQueryExecutionContext.STATES.start; + get conflicts() { + if (!this.$conflicts) { + this.$conflicts = new Conflicts(this, this.clientContext); + } + return this.$conflicts; } /** - * Execute a provided callback on the next element in the execution context. + * Returns a reference URL to the resource. Used for linking in Permissions. */ - async nextItem(diagnosticNode) { - ++this.currentIndex; - const response = await this.current(diagnosticNode); - return response; + get url() { + return createDocumentCollectionUri(this.database.id, this.id); } /** - * Retrieve the current element on the execution context. + * Returns a container instance. Note: You should get this from `database.container(id)`, rather than creating your own object. + * @param database - The parent {@link Database}. + * @param id - The id of the given container. + * @hidden */ - async current(diagnosticNode) { - if (this.currentIndex < this.resources.length) { - return { - result: this.resources[this.currentIndex], - headers: getInitialHeader(), - }; - } - if (this._canFetchMore()) { - const { result: resources, headers } = await this.fetchMore(diagnosticNode); - this.resources = resources; - if (this.resources.length === 0) { - if (!this.continuationToken && this.currentPartitionIndex >= this.fetchFunctions.length) { - this.state = DefaultQueryExecutionContext.STATES.ended; - return { result: undefined, headers }; - } - else { - return this.current(diagnosticNode); - } - } - return { result: this.resources[this.currentIndex], headers }; - } - else { - this.state = DefaultQueryExecutionContext.STATES.ended; - return { - result: undefined, - headers: getInitialHeader(), - }; - } + constructor(database, id, clientContext) { + this.database = database; + this.id = id; + this.clientContext = clientContext; } /** - * Determine if there are still remaining resources to processs based on - * the value of the continuation token or the elements remaining on the current batch in the execution context. + * Used to read, replace, or delete a specific, existing {@link Item} by id. * - * @returns true if there is other elements to process in the DefaultQueryExecutionContext. + * Use `.items` for creating new items, or querying/reading all items. + * + * @param id - The id of the {@link Item}. + * @param partitionKeyValue - The value of the {@link Item} partition key + * @example Replace an item + * `const {body: replacedItem} = await container.item("", "").replace({id: "", title: "Updated post", authorID: 5});` */ - hasMoreResults() { - return (this.state === DefaultQueryExecutionContext.STATES.start || - this.continuationToken !== undefined || - this.currentIndex < this.resources.length - 1 || - this.currentPartitionIndex < this.fetchFunctions.length); + item(id, partitionKeyValue) { + return new Item(this, id, this.clientContext, partitionKeyValue); } /** - * Fetches the next batch of the feed and pass them as an array to a callback + * Used to read, replace, or delete a specific, existing {@link Conflict} by id. + * + * Use `.conflicts` for creating new conflicts, or querying/reading all conflicts. + * @param id - The id of the {@link Conflict}. 
*/ - async fetchMore(diagnosticNode) { - return addDignosticChild(async (childDiagnosticNode) => { - if (this.currentPartitionIndex >= this.fetchFunctions.length) { - return { - headers: getInitialHeader(), - result: undefined, - }; - } - // Keep to the original continuation and to restore the value after fetchFunction call - const originalContinuation = this.options.continuationToken || this.options.continuation; - this.options.continuationToken = this.continuationToken; - // Return undefined if there is no more results - if (this.currentPartitionIndex >= this.fetchFunctions.length) { - return { - headers: getInitialHeader(), - result: undefined, - }; - } - let resources; - let responseHeaders; - try { - let p; - if (this.nextFetchFunction !== undefined) { - logger$3.verbose("using prefetch"); - p = this.nextFetchFunction; - this.nextFetchFunction = undefined; - } - else { - logger$3.verbose("using fresh fetch"); - p = this.fetchFunctions[this.currentPartitionIndex](childDiagnosticNode, this.options); - } - const response = await p; - resources = response.result; - childDiagnosticNode.recordQueryResult(resources, exports.CosmosDbDiagnosticLevel.debugUnsafe); - responseHeaders = response.headers; - this.continuationToken = responseHeaders[Constants$1.HttpHeaders.Continuation]; - if (!this.continuationToken) { - ++this.currentPartitionIndex; - } - if (this.options && this.options.bufferItems === true) { - const fetchFunction = this.fetchFunctions[this.currentPartitionIndex]; - this.nextFetchFunction = fetchFunction - ? fetchFunction(childDiagnosticNode, Object.assign(Object.assign({}, this.options), { continuationToken: this.continuationToken })) - : undefined; - } - } - catch (err) { - this.state = DefaultQueryExecutionContext.STATES.ended; - // return callback(err, undefined, responseHeaders); - // TODO: Error and data being returned is an antipattern, this might broken + conflict(id, partitionKey) { + return new Conflict(this, id, this.clientContext, partitionKey); + } + /** Read the container's definition */ + async read(options) { + return withDiagnostics(async (diagnosticNode) => { + return this.readInternal(diagnosticNode, options); + }, this.clientContext); + } + /** + * @hidden + */ + async readInternal(diagnosticNode, options) { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.read({ + path, + resourceType: exports.ResourceType.container, + resourceId: id, + options, + diagnosticNode, + }); + this.clientContext.partitionKeyDefinitionCache[this.url] = response.result.partitionKey; + return new ContainerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + } + /** Replace the container's definition */ + async replace(body, options) { + return withDiagnostics(async (diagnosticNode) => { + const err = {}; + if (!isResourceValid(body, err)) { throw err; } - this.state = DefaultQueryExecutionContext.STATES.inProgress; - this.currentIndex = 0; - this.options.continuationToken = originalContinuation; - this.options.continuation = originalContinuation; - // deserializing query metrics so that we aren't working with delimited strings in the rest of the code base - if (Constants$1.HttpHeaders.QueryMetrics in responseHeaders) { - const delimitedString = responseHeaders[Constants$1.HttpHeaders.QueryMetrics]; - let queryMetrics = QueryMetrics.createFromDelimitedString(delimitedString); - // Add the request charge to the query metrics so that we can have per partition request 
charge. - if (Constants$1.HttpHeaders.RequestCharge in responseHeaders) { - const requestCharge = Number(responseHeaders[Constants$1.HttpHeaders.RequestCharge]) || 0; - queryMetrics = new QueryMetrics(queryMetrics.retrievedDocumentCount, queryMetrics.retrievedDocumentSize, queryMetrics.outputDocumentCount, queryMetrics.outputDocumentSize, queryMetrics.indexHitDocumentCount, queryMetrics.totalQueryExecutionTime, queryMetrics.queryPreparationTimes, queryMetrics.indexLookupTime, queryMetrics.documentLoadTime, queryMetrics.vmExecutionTime, queryMetrics.runtimeExecutionTimes, queryMetrics.documentWriteTime, new ClientSideMetrics(requestCharge)); - } - // Wraping query metrics in a object where the key is '0' just so single partition - // and partition queries have the same response schema - responseHeaders[Constants$1.HttpHeaders.QueryMetrics] = {}; - responseHeaders[Constants$1.HttpHeaders.QueryMetrics]["0"] = queryMetrics; - } - return { result: resources, headers: responseHeaders }; - }, diagnosticNode, exports.DiagnosticNodeType.DEFAULT_QUERY_NODE, { - queryMethodIdentifier: "fetchMore", - }); + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.replace({ + body, + path, + resourceType: exports.ResourceType.container, + resourceId: id, + options, + diagnosticNode, + }); + return new ContainerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } - _canFetchMore() { - const res = this.state === DefaultQueryExecutionContext.STATES.start || - (this.continuationToken && this.state === DefaultQueryExecutionContext.STATES.inProgress) || - (this.currentPartitionIndex < this.fetchFunctions.length && - this.state === DefaultQueryExecutionContext.STATES.inProgress); - return res; + /** Delete the container */ + async delete(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.container, + resourceId: id, + options, + diagnosticNode, + }); + return new ContainerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } -} -DefaultQueryExecutionContext.STATES = STATES; - -/** @hidden */ -class AverageAggregator { /** - * Add the provided item to aggregation result. + * Gets the partition key definition first by looking into the cache otherwise by reading the collection. + * @deprecated This method has been renamed to readPartitionKeyDefinition. */ - aggregate(other) { - if (other == null || other.sum == null) { - return; - } - if (this.sum == null) { - this.sum = 0.0; - this.count = 0; - } - this.sum += other.sum; - this.count += other.count; + async getPartitionKeyDefinition() { + return withDiagnostics(async (diagnosticNode) => { + return this.readPartitionKeyDefinition(diagnosticNode); + }, this.clientContext); } /** - * Get the aggregation result. + * Gets the partition key definition first by looking into the cache otherwise by reading the collection. 
+ * @hidden */ - getResult() { - if (this.sum == null || this.count <= 0) { - return undefined; + async readPartitionKeyDefinition(diagnosticNode) { + // $ISSUE-felixfan-2016-03-17: Make name based path and link based path use the same key + // $ISSUE-felixfan-2016-03-17: Refresh partitionKeyDefinitionCache when necessary + if (this.url in this.clientContext.partitionKeyDefinitionCache) { + diagnosticNode.addData({ readFromCache: true }); + return new ResourceResponse(this.clientContext.partitionKeyDefinitionCache[this.url], {}, 0, getEmptyCosmosDiagnostics()); } - return this.sum / this.count; + const { headers, statusCode, diagnostics } = await withMetadataDiagnostics(async (node) => { + return this.readInternal(node); + }, diagnosticNode, exports.MetadataLookUpType.ContainerLookUp); + return new ResourceResponse(this.clientContext.partitionKeyDefinitionCache[this.url], headers, statusCode, diagnostics); } -} - -/** @hidden */ -class CountAggregator { /** - * Represents an aggregator for COUNT operator. - * @hidden + * Gets offer on container. If none exists, returns an OfferResponse with undefined. */ - constructor() { - this.value = 0; + async readOffer(options = {}) { + return withDiagnostics(async (diagnosticNode) => { + const { resource: container } = await this.read(); + const path = "/offers"; + const url = container._self; + const response = await this.clientContext.queryFeed({ + path, + resourceId: "", + resourceType: exports.ResourceType.offer, + query: `SELECT * from root where root.resource = "${url}"`, + resultFn: (result) => result.Offers, + options, + diagnosticNode, + }); + const offer = response.result[0] + ? new Offer(this.database.client, response.result[0].id, this.clientContext) + : undefined; + return new OfferResponse(response.result[0], response.headers, response.code, getEmptyCosmosDiagnostics(), offer); + }, this.clientContext); + } + async getQueryPlan(query) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + return this.clientContext.getQueryPlan(path + "/docs", exports.ResourceType.item, getIdFromLink(this.url), query, {}, diagnosticNode); + }, this.clientContext); + } + readPartitionKeyRanges(feedOptions) { + feedOptions = feedOptions || {}; + return this.clientContext.queryPartitionKeyRanges(this.url, undefined, feedOptions); } /** - * Add the provided item to aggregation result. + * + * @returns all the feed ranges for which changefeed could be fetched. */ - aggregate(other) { - this.value += other; + async getFeedRanges() { + return withDiagnostics(async (diagnosticNode) => { + const { resources } = await this.readPartitionKeyRanges().fetchAllInternal(diagnosticNode); + const feedRanges = []; + for (const resource of resources) { + const feedRange = new FeedRangeInternal(resource.minInclusive, resource.maxExclusive); + Object.freeze(feedRange); + feedRanges.push(feedRange); + } + return feedRanges; + }, this.clientContext); } /** - * Get the aggregation result. 
+ * Delete all documents belong to the container for the provided partition key value + * @param partitionKey - The partition key value of the items to be deleted */ - getResult() { - return this.value; - } + async deleteAllItemsForPartitionKey(partitionKey, options) { + return withDiagnostics(async (diagnosticNode) => { + let path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + path = path + "/operations/partitionkeydelete"; + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.container, + resourceId: id, + options, + partitionKey: partitionKey, + method: exports.HTTPMethod.post, + diagnosticNode, + }); + return new ContainerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); + } } -// TODO: this smells funny -/** @hidden */ -const TYPEORDCOMPARATOR = Object.freeze({ - NoValue: { - ord: 0, - }, - undefined: { - ord: 1, - }, - boolean: { - ord: 2, - compFunc: (a, b) => { - return a === b ? 0 : a > b ? 1 : -1; - }, - }, - number: { - ord: 4, - compFunc: (a, b) => { - return a === b ? 0 : a > b ? 1 : -1; - }, - }, - string: { - ord: 5, - compFunc: (a, b) => { - return a === b ? 0 : a > b ? 1 : -1; - }, - }, -}); -/** @hidden */ -class OrderByDocumentProducerComparator { - constructor(sortOrder) { - this.sortOrder = sortOrder; - } // TODO: This should be an enum - targetPartitionKeyRangeDocProdComparator(docProd1, docProd2) { - const a = docProd1.getTargetParitionKeyRange()["minInclusive"]; - const b = docProd2.getTargetParitionKeyRange()["minInclusive"]; - return a === b ? 0 : a > b ? 1 : -1; - } - compare(docProd1, docProd2) { - // Need to check for split, since we don't want to dereference "item" of undefined / exception - if (docProd1.gotSplit()) { - return -1; - } - if (docProd2.gotSplit()) { - return 1; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function validateOffer(body) { + if (body.throughput) { + if (body.maxThroughput) { + console.log("should be erroring"); + throw new Error("Cannot specify `throughput` with `maxThroughput`"); } - const orderByItemsRes1 = this.getOrderByItems(docProd1.peekBufferedItems()[0]); - const orderByItemsRes2 = this.getOrderByItems(docProd2.peekBufferedItems()[0]); - // validate order by items and types - // TODO: once V1 order by on different types is fixed this need to change - this.validateOrderByItems(orderByItemsRes1, orderByItemsRes2); - // no async call in the for loop - for (let i = 0; i < orderByItemsRes1.length; i++) { - // compares the orderby items one by one - const compRes = this.compareOrderByItem(orderByItemsRes1[i], orderByItemsRes2[i]); - if (compRes !== 0) { - if (this.sortOrder[i] === "Ascending") { - return compRes; - } - else if (this.sortOrder[i] === "Descending") { - return -compRes; - } - } + if (body.autoUpgradePolicy) { + throw new Error("Cannot specify autoUpgradePolicy with throughput. 
Use `maxThroughput` instead"); } - return this.targetPartitionKeyRangeDocProdComparator(docProd1, docProd2); } - // TODO: This smells funny - compareValue(item1, type1, item2, type2) { - if (type1 === "object" || type2 === "object") { - throw new Error("Tried to compare an object type"); - } - const type1Ord = TYPEORDCOMPARATOR[type1].ord; - const type2Ord = TYPEORDCOMPARATOR[type2].ord; - const typeCmp = type1Ord - type2Ord; - if (typeCmp !== 0) { - // if the types are different, use type ordinal - return typeCmp; - } - // both are of the same type - if (type1Ord === TYPEORDCOMPARATOR["undefined"].ord || - type1Ord === TYPEORDCOMPARATOR["NoValue"].ord) { - // if both types are undefined or Null they are equal - return 0; - } - const compFunc = TYPEORDCOMPARATOR[type1].compFunc; - if (typeof compFunc === "undefined") { - throw new Error("Cannot find the comparison function"); - } - // same type and type is defined compare the items - return compFunc(item1, item2); +} + +/** + * Operations for creating new containers, and reading/querying all containers + * + * @see {@link Container} for reading, replacing, or deleting an existing container; use `.container(id)`. + * + * Note: all these operations make calls against a fixed budget. + * You should design your system such that these calls scale sublinearly with your application. + * For instance, do not call `containers.readAll()` before every single `item.read()` call, to ensure the container exists; + * do this once on application start up. + */ +class Containers { + constructor(database, clientContext) { + this.database = database; + this.clientContext = clientContext; } - compareOrderByItem(orderByItem1, orderByItem2) { - const type1 = this.getType(orderByItem1); - const type2 = this.getType(orderByItem2); - return this.compareValue(orderByItem1["item"], type1, orderByItem2["item"], type2); + query(query, options) { + const path = getPathFromLink(this.database.url, exports.ResourceType.container); + const id = getIdFromLink(this.database.url); + return new QueryIterator(this.clientContext, query, options, (diagNode, innerOptions) => { + return this.clientContext.queryFeed({ + path, + resourceType: exports.ResourceType.container, + resourceId: id, + resultFn: (result) => result.DocumentCollections, + query, + options: innerOptions, + diagnosticNode: diagNode, + }); + }); } - validateOrderByItems(res1, res2) { - if (res1.length !== res2.length) { - throw new Error(`Expected ${res1.length}, but got ${res2.length}.`); + /** + * Creates a container. + * + * A container is a named logical container for items. + * + * A database may contain zero or more named containers and each container consists of + * zero or more JSON items. + * + * Being schema-free, the items in a container do not need to share the same structure or fields. + * + * + * Since containers are application resources, they can be authorized using either the + * master key or resource keys. + * + * @param body - Represents the body of the container. + * @param options - Use to set options like response page size, continuation tokens, etc. 
+ */ + async create(body, options = {}) { + return withDiagnostics(async (diagnosticNode) => { + return this.createInternal(diagnosticNode, body, options); + }, this.clientContext); + } + /** + * @hidden + */ + async createInternal(diagnosticNode, body, options = {}) { + const err = {}; + if (!isResourceValid(body, err)) { + throw err; } - if (res1.length !== this.sortOrder.length) { - throw new Error("orderByItems cannot have a different size than sort orders."); + const path = getPathFromLink(this.database.url, exports.ResourceType.container); + const id = getIdFromLink(this.database.url); + validateOffer(body); + if (body.maxThroughput) { + const autoscaleParams = { + maxThroughput: body.maxThroughput, + }; + if (body.autoUpgradePolicy) { + autoscaleParams.autoUpgradePolicy = body.autoUpgradePolicy; + } + const autoscaleHeader = JSON.stringify(autoscaleParams); + options.initialHeaders = Object.assign({}, options.initialHeaders, { + [Constants$1.HttpHeaders.AutoscaleSettings]: autoscaleHeader, + }); + delete body.maxThroughput; + delete body.autoUpgradePolicy; } - for (let i = 0; i < this.sortOrder.length; i++) { - const type1 = this.getType(res1[i]); - const type2 = this.getType(res2[i]); - if (type1 !== type2) { - throw new Error(`Expected ${type1}, but got ${type2}. Cannot execute cross partition order-by queries on mixed types. Consider filtering your query using IS_STRING or IS_NUMBER to get around this exception.`); + if (body.throughput) { + options.initialHeaders = Object.assign({}, options.initialHeaders, { + [Constants$1.HttpHeaders.OfferThroughput]: body.throughput, + }); + delete body.throughput; + } + if (typeof body.partitionKey === "string") { + if (!body.partitionKey.startsWith("/")) { + throw new Error("Partition key must start with '/'"); } + body.partitionKey = { + paths: [body.partitionKey], + }; } - } - getType(orderByItem) { - // TODO: any item? - if (orderByItem === undefined || orderByItem.item === undefined) { - return "NoValue"; + // If they don't specify a partition key, use the default path + if (!body.partitionKey || !body.partitionKey.paths) { + body.partitionKey = { + paths: [DEFAULT_PARTITION_KEY_PATH], + }; } - const type = typeof orderByItem.item; - if (TYPEORDCOMPARATOR[type] === undefined) { - throw new Error(`unrecognizable type ${type}`); + const response = await this.clientContext.create({ + body, + path, + resourceType: exports.ResourceType.container, + resourceId: id, + diagnosticNode, + options, + }); + const ref = new Container(this.database, response.result.id, this.clientContext); + return new ContainerResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); + } + /** + * Checks if a Container exists, and, if it doesn't, creates it. + * This will make a read operation based on the id in the `body`, then if it is not found, a create operation. + * You should confirm that the output matches the body you passed in for non-default properties (i.e. indexing policy/etc.) + * + * A container is a named logical container for items. + * + * A database may contain zero or more named containers and each container consists of + * zero or more JSON items. + * + * Being schema-free, the items in a container do not need to share the same structure or fields. + * + * + * Since containers are application resources, they can be authorized using either the + * master key or resource keys. + * + * @param body - Represents the body of the container. 
+ * @param options - Use to set options like response page size, continuation tokens, etc. + */ + async createIfNotExists(body, options) { + if (!body || body.id === null || body.id === undefined) { + throw new Error("body parameter must be an object with an id property"); } - return type; + /* + 1. Attempt to read the Container (based on an assumption that most containers will already exist, so its faster) + 2. If it fails with NotFound error, attempt to create the container. Else, return the read results. + */ + return withDiagnostics(async (diagnosticNode) => { + try { + const readResponse = await this.database + .container(body.id) + .readInternal(diagnosticNode, options); + return readResponse; + } + catch (err) { + if (err.code === StatusCodes.NotFound) { + const createResponse = await this.createInternal(diagnosticNode, body, options); + // Must merge the headers to capture RU costskaty + mergeHeaders(createResponse.headers, err.headers); + return createResponse; + } + else { + throw err; + } + } + }, this.clientContext); } - getOrderByItems(res) { - // TODO: any res? - return res["orderByItems"]; + /** + * Read all containers. + * @param options - Use to set options like response page size, continuation tokens, etc. + * @returns {@link QueryIterator} Allows you to return all containers in an array or iterate over them one at a time. + * @example Read all containers to array. + * ```typescript + * const {body: containerList} = await client.database("").containers.readAll().fetchAll(); + * ``` + */ + readAll(options) { + return this.query(undefined, options); } } -// Copyright (c) Microsoft Corporation. -/** @hidden */ -class MaxAggregator { - /** - * Represents an aggregator for MAX operator. - * @hidden - */ - constructor() { - this.value = undefined; - this.comparer = new OrderByDocumentProducerComparator(["Ascending"]); +class PermissionResponse extends ResourceResponse { + constructor(resource, headers, statusCode, permission, diagnostics) { + super(resource, headers, statusCode, diagnostics); + this.permission = permission; } +} + +/** + * Use to read, replace, or delete a given {@link Permission} by id. + * + * @see {@link Permissions} to create, upsert, query, or read all Permissions. + */ +class Permission { /** - * Add the provided item to aggregation result. + * Returns a reference URL to the resource. Used for linking in Permissions. */ - aggregate(other) { - if (this.value === undefined) { - this.value = other.max; - } - else if (this.comparer.compareValue(other.max, typeof other.max, this.value, typeof this.value) > 0) { - this.value = other.max; - } + get url() { + return createPermissionUri(this.user.database.id, this.user.id, this.id); } /** - * Get the aggregation result. + * @hidden + * @param user - The parent {@link User}. + * @param id - The id of the given {@link Permission}. */ - getResult() { - return this.value; + constructor(user, id, clientContext) { + this.user = user; + this.id = id; + this.clientContext = clientContext; } -} - -// Copyright (c) Microsoft Corporation. -/** @hidden */ -class MinAggregator { /** - * Represents an aggregator for MIN operator. - * @hidden + * Read the {@link PermissionDefinition} of the given {@link Permission}. 
*/ - constructor() { - this.value = undefined; - this.comparer = new OrderByDocumentProducerComparator(["Ascending"]); + async read(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.read({ + path, + resourceType: exports.ResourceType.permission, + resourceId: id, + options, + diagnosticNode, + }); + return new PermissionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } /** - * Add the provided item to aggregation result. + * Replace the given {@link Permission} with the specified {@link PermissionDefinition}. + * @param body - The specified {@link PermissionDefinition}. */ - aggregate(other) { - if (this.value === undefined) { - // || typeof this.value === "object" - this.value = other.min; - } - else { - const otherType = other.min === null ? "NoValue" : typeof other.min; // || typeof other === "object" - const thisType = this.value === null ? "NoValue" : typeof this.value; - if (this.comparer.compareValue(other.min, otherType, this.value, thisType) < 0) { - this.value = other.min; + async replace(body, options) { + return withDiagnostics(async (diagnosticNode) => { + const err = {}; + if (!isResourceValid(body, err)) { + throw err; } - } + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.replace({ + body, + path, + resourceType: exports.ResourceType.permission, + resourceId: id, + options, + diagnosticNode, + }); + return new PermissionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } /** - * Get the aggregation result. + * Delete the given {@link Permission}. */ - getResult() { - return this.value; + async delete(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.permission, + resourceId: id, + options, + diagnosticNode, + }); + return new PermissionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } } -/** @hidden */ -class SumAggregator { +/** + * Use to create, replace, query, and read all Permissions. + * + * @see {@link Permission} to read, replace, or delete a specific permission by id. + */ +class Permissions { /** - * Add the provided item to aggregation result. + * @hidden + * @param user - The parent {@link User}. */ - aggregate(other) { - if (other === undefined) { - return; - } - if (this.sum === undefined) { - this.sum = other; - } - else { - this.sum += other; - } + constructor(user, clientContext) { + this.user = user; + this.clientContext = clientContext; + } + query(query, options) { + const path = getPathFromLink(this.user.url, exports.ResourceType.permission); + const id = getIdFromLink(this.user.url); + return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { + return this.clientContext.queryFeed({ + path, + resourceType: exports.ResourceType.permission, + resourceId: id, + resultFn: (result) => result.Permissions, + query, + options: innerOptions, + diagnosticNode, + }); + }); } /** - * Get the aggregation result. + * Read all permissions. + * @example Read all permissions to array. 
+ * ```typescript + * const {body: permissionList} = await user.permissions.readAll().fetchAll(); + * ``` */ - getResult() { - return this.sum; + readAll(options) { + return this.query(undefined, options); } -} - -/** @hidden */ -class StaticValueAggregator { - aggregate(other) { - if (this.value === undefined) { - this.value = other; - } + /** + * Create a permission. + * + * A permission represents a per-User Permission to access a specific resource + * e.g. Item or Container. + * @param body - Represents the body of the permission. + */ + async create(body, options) { + return withDiagnostics(async (diagnosticNode) => { + const err = {}; + if (!isResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.user.url, exports.ResourceType.permission); + const id = getIdFromLink(this.user.url); + const response = await this.clientContext.create({ + body, + path, + resourceType: exports.ResourceType.permission, + resourceId: id, + diagnosticNode, + options, + }); + const ref = new Permission(this.user, response.result.id, this.clientContext); + return new PermissionResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); + }, this.clientContext); } - getResult() { - return this.value; + /** + * Upsert a permission. + * + * A permission represents a per-User Permission to access a + * specific resource e.g. Item or Container. + */ + async upsert(body, options) { + return withDiagnostics(async (diagnosticNode) => { + const err = {}; + if (!isResourceValid(body, err)) { + throw err; + } + const path = getPathFromLink(this.user.url, exports.ResourceType.permission); + const id = getIdFromLink(this.user.url); + const response = await this.clientContext.upsert({ + body, + path, + resourceType: exports.ResourceType.permission, + resourceId: id, + options, + diagnosticNode, + }); + const ref = new Permission(this.user, response.result.id, this.clientContext); + return new PermissionResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); + }, this.clientContext); } } -// Copyright (c) Microsoft Corporation. -function createAggregator(aggregateType) { - switch (aggregateType) { - case "Average": - return new AverageAggregator(); - case "Count": - return new CountAggregator(); - case "Max": - return new MaxAggregator(); - case "Min": - return new MinAggregator(); - case "Sum": - return new SumAggregator(); - default: - return new StaticValueAggregator(); +class UserResponse extends ResourceResponse { + constructor(resource, headers, statusCode, user, diagnostics) { + super(resource, headers, statusCode, diagnostics); + this.user = user; } } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** @hidden */ -var FetchResultType; -(function (FetchResultType) { - FetchResultType[FetchResultType["Done"] = 0] = "Done"; - FetchResultType[FetchResultType["Exception"] = 1] = "Exception"; - FetchResultType[FetchResultType["Result"] = 2] = "Result"; -})(FetchResultType || (FetchResultType = {})); -/** @hidden */ -class FetchResult { +/** + * Used to read, replace, and delete Users. + * + * Additionally, you can access the permissions for a given user via `user.permission` and `user.permissions`. + * + * @see {@link Users} to create, upsert, query, or read all. + */ +class User { /** - * Wraps fetch results for the document producer. - * This allows the document producer to buffer exceptions so that actual results don't get flushed during splits. 
- * - * @param feedReponse - The response the document producer got back on a successful fetch - * @param error - The exception meant to be buffered on an unsuccessful fetch - * @hidden + * Returns a reference URL to the resource. Used for linking in Permissions. */ - constructor(feedResponse, error) { - // TODO: feedResponse/error - if (feedResponse !== undefined) { - this.feedResponse = feedResponse; - this.fetchResultType = FetchResultType.Result; - } - else { - this.error = error; - this.fetchResultType = FetchResultType.Exception; - } + get url() { + return createUserUri(this.database.id, this.id); } -} - -/** @hidden */ -class DocumentProducer { /** - * Provides the Target Partition Range Query Execution Context. - * @param clientContext - The service endpoint to use to create the client. - * @param collectionLink - Represents collection link - * @param query - A SQL query. - * @param targetPartitionKeyRange - Query Target Partition key Range * @hidden + * @param database - The parent {@link Database}. */ - constructor(clientContext, collectionLink, query, targetPartitionKeyRange, options) { + constructor(database, id, clientContext) { + this.database = database; + this.id = id; this.clientContext = clientContext; - this.generation = 0; - this.fetchFunction = async (diagnosticNode, options) => { - const path = getPathFromLink(this.collectionLink, exports.ResourceType.item); - diagnosticNode.addData({ partitionKeyRangeId: this.targetPartitionKeyRange.id }); - const id = getIdFromLink(this.collectionLink); - return this.clientContext.queryFeed({ + this.permissions = new Permissions(this, this.clientContext); + } + /** + * Operations to read, replace, or delete a specific Permission by id. + * + * See `client.permissions` for creating, upserting, querying, or reading all operations. + */ + permission(id) { + return new Permission(this, id, this.clientContext); + } + /** + * Read the {@link UserDefinition} for the given {@link User}. + */ + async read(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.read({ path, - resourceType: exports.ResourceType.item, + resourceType: exports.ResourceType.user, resourceId: id, - resultFn: (result) => result.Documents, - query: this.query, options, diagnosticNode, - partitionKeyRangeId: this.targetPartitionKeyRange["id"], }); - }; - // TODO: any options - this.collectionLink = collectionLink; - this.query = query; - this.targetPartitionKeyRange = targetPartitionKeyRange; - this.fetchResults = []; - this.allFetched = false; - this.err = undefined; - this.previousContinuationToken = undefined; - this.continuationToken = undefined; - this.respHeaders = getInitialHeader(); - this.internalExecutionContext = new DefaultQueryExecutionContext(options, this.fetchFunction); + return new UserResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } /** - * Synchronously gives the contiguous buffered results (stops at the first non result) if any - * @returns buffered current items if any - * @hidden + * Replace the given {@link User}'s definition with the specified {@link UserDefinition}. + * @param body - The specified {@link UserDefinition} to replace the definition. 
*/ - peekBufferedItems() { - const bufferedResults = []; - for (let i = 0, done = false; i < this.fetchResults.length && !done; i++) { - const fetchResult = this.fetchResults[i]; - switch (fetchResult.fetchResultType) { - case FetchResultType.Done: - done = true; - break; - case FetchResultType.Exception: - done = true; - break; - case FetchResultType.Result: - bufferedResults.push(fetchResult.feedResponse); - break; - } - } - return bufferedResults; - } - hasMoreResults() { - return this.internalExecutionContext.hasMoreResults() || this.fetchResults.length !== 0; - } - gotSplit() { - const fetchResult = this.fetchResults[0]; - if (fetchResult.fetchResultType === FetchResultType.Exception) { - if (DocumentProducer._needPartitionKeyRangeCacheRefresh(fetchResult.error)) { - return true; + async replace(body, options) { + return withDiagnostics(async (diagnosticNode) => { + const err = {}; + if (!isResourceValid(body, err)) { + throw err; } - } - return false; - } - _getAndResetActiveResponseHeaders() { - const ret = this.respHeaders; - this.respHeaders = getInitialHeader(); - return ret; - } - _updateStates(err, allFetched) { - // TODO: any Error - if (err) { - this.err = err; - return; - } - if (allFetched) { - this.allFetched = true; - } - if (this.internalExecutionContext.continuationToken === this.continuationToken) { - // nothing changed - return; - } - this.previousContinuationToken = this.continuationToken; - this.continuationToken = this.internalExecutionContext.continuationToken; - } - static _needPartitionKeyRangeCacheRefresh(error) { - // TODO: error - return (error.code === StatusCodes.Gone && - "substatus" in error && - error["substatus"] === SubStatusCodes.PartitionKeyRangeGone); + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.replace({ + body, + path, + resourceType: exports.ResourceType.user, + resourceId: id, + options, + diagnosticNode, + }); + return new UserResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } /** - * Fetches and bufferes the next page of results and executes the given callback + * Delete the given {@link User}. */ - async bufferMore(diagnosticNode) { - if (this.err) { - throw this.err; - } - try { - const { result: resources, headers: headerResponse } = await this.internalExecutionContext.fetchMore(diagnosticNode); - ++this.generation; - this._updateStates(undefined, resources === undefined); - if (resources !== undefined) { - // some more results - resources.forEach((element) => { - // TODO: resources any - this.fetchResults.push(new FetchResult(element, undefined)); - }); - } - // need to modify the header response so that the query metrics are per partition - if (headerResponse != null && Constants$1.HttpHeaders.QueryMetrics in headerResponse) { - // "0" is the default partition before one is actually assigned. - const queryMetrics = headerResponse[Constants$1.HttpHeaders.QueryMetrics]["0"]; - // Wraping query metrics in a object where the keys are the partition key range. 
- headerResponse[Constants$1.HttpHeaders.QueryMetrics] = {}; - headerResponse[Constants$1.HttpHeaders.QueryMetrics][this.targetPartitionKeyRange.id] = - queryMetrics; - } - return { result: resources, headers: headerResponse }; - } - catch (err) { - // TODO: any error - if (DocumentProducer._needPartitionKeyRangeCacheRefresh(err)) { - // Split just happend - // Buffer the error so the execution context can still get the feedResponses in the itemBuffer - const bufferedError = new FetchResult(undefined, err); - this.fetchResults.push(bufferedError); - // Putting a dummy result so that the rest of code flows - return { - result: [bufferedError], - headers: err.headers, - }; - } - else { - this._updateStates(err, err.resources === undefined); - throw err; - } - } + async delete(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.user, + resourceId: id, + options, + diagnosticNode, + }); + return new UserResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } +} + +/** + * Used to create, upsert, query, and read all users. + * + * @see {@link User} to read, replace, or delete a specific User by id. + */ +class Users { /** - * Synchronously gives the bufferend current item if any - * @returns buffered current item if any * @hidden + * @param database - The parent {@link Database}. */ - getTargetParitionKeyRange() { - return this.targetPartitionKeyRange; + constructor(database, clientContext) { + this.database = database; + this.clientContext = clientContext; + } + query(query, options) { + const path = getPathFromLink(this.database.url, exports.ResourceType.user); + const id = getIdFromLink(this.database.url); + return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { + return this.clientContext.queryFeed({ + path, + resourceType: exports.ResourceType.user, + resourceId: id, + resultFn: (result) => result.Users, + query, + options: innerOptions, + diagnosticNode, + }); + }); } /** - * Fetches the next element in the DocumentProducer. + * Read all users.- + * @example Read all users to array. + * ```typescript + * const {body: usersList} = await database.users.readAll().fetchAll(); + * ``` */ - async nextItem(diagnosticNode) { - if (this.err) { - this._updateStates(this.err, undefined); - throw this.err; - } - try { - const { result, headers } = await this.current(diagnosticNode); - const fetchResult = this.fetchResults.shift(); - this._updateStates(undefined, result === undefined); - if (fetchResult.feedResponse !== result) { - throw new Error(`Expected ${fetchResult.feedResponse} to equal ${result}`); - } - switch (fetchResult.fetchResultType) { - case FetchResultType.Done: - return { result: undefined, headers }; - case FetchResultType.Exception: - fetchResult.error.headers = headers; - throw fetchResult.error; - case FetchResultType.Result: - return { result: fetchResult.feedResponse, headers }; + readAll(options) { + return this.query(undefined, options); + } + /** + * Create a database user with the specified {@link UserDefinition}. + * @param body - The specified {@link UserDefinition}. 
+ */ + async create(body, options) { + return withDiagnostics(async (diagnosticNode) => { + const err = {}; + if (!isResourceValid(body, err)) { + throw err; } - } - catch (err) { - this._updateStates(err, err.item === undefined); - throw err; - } + const path = getPathFromLink(this.database.url, exports.ResourceType.user); + const id = getIdFromLink(this.database.url); + const response = await this.clientContext.create({ + body, + path, + resourceType: exports.ResourceType.user, + resourceId: id, + options, + diagnosticNode, + }); + const ref = new User(this.database, response.result.id, this.clientContext); + return new UserResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); + }, this.clientContext); } /** - * Retrieve the current element on the DocumentProducer. + * Upsert a database user with a specified {@link UserDefinition}. + * @param body - The specified {@link UserDefinition}. */ - async current(diagnosticNode) { - // If something is buffered just give that - if (this.fetchResults.length > 0) { - const fetchResult = this.fetchResults[0]; - // Need to unwrap fetch results - switch (fetchResult.fetchResultType) { - case FetchResultType.Done: - return { - result: undefined, - headers: this._getAndResetActiveResponseHeaders(), - }; - case FetchResultType.Exception: - fetchResult.error.headers = this._getAndResetActiveResponseHeaders(); - throw fetchResult.error; - case FetchResultType.Result: - return { - result: fetchResult.feedResponse, - headers: this._getAndResetActiveResponseHeaders(), - }; + async upsert(body, options) { + return withDiagnostics(async (diagnosticNode) => { + const err = {}; + if (!isResourceValid(body, err)) { + throw err; } - } - // If there isn't anymore items left to fetch then let the user know. - if (this.allFetched) { - return { - result: undefined, - headers: this._getAndResetActiveResponseHeaders(), - }; - } - // If there are no more bufferd items and there are still items to be fetched then buffer more - const { result, headers } = await this.bufferMore(diagnosticNode); - mergeHeaders(this.respHeaders, headers); - if (result === undefined) { - return { result: undefined, headers: this.respHeaders }; - } - return this.current(diagnosticNode); + const path = getPathFromLink(this.database.url, exports.ResourceType.user); + const id = getIdFromLink(this.database.url); + const response = await this.clientContext.upsert({ + body, + path, + resourceType: exports.ResourceType.user, + resourceId: id, + options, + diagnosticNode, + }); + const ref = new User(this.database, response.result.id, this.clientContext); + return new UserResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); + }, this.clientContext); } } -/** @hidden */ -class QueryRange { +/** Response object for Database operations */ +class DatabaseResponse extends ResourceResponse { + constructor(resource, headers, statusCode, database, diagnostics) { + super(resource, headers, statusCode, diagnostics); + this.database = database; + } +} + +/** + * Operations for reading or deleting an existing database. + * + * @see {@link Databases} for creating new databases, and reading/querying all databases; use `client.databases`. + * + * Note: all these operations make calls against a fixed budget. + * You should design your system such that these calls scale sublinearly with your application. 
+ * For instance, do not call `database.read()` before every single `item.read()` call, to ensure the database exists; + * do this once on application start up. + */ +class Database { /** - * Represents a QueryRange. - * - * @param rangeMin - min - * @param rangeMin - max - * @param isMinInclusive - isMinInclusive - * @param isMaxInclusive - isMaxInclusive - * @hidden + * Returns a reference URL to the resource. Used for linking in Permissions. */ - constructor(rangeMin, rangeMax, isMinInclusive, isMaxInclusive) { - this.min = rangeMin; - this.max = rangeMax; - this.isMinInclusive = isMinInclusive; - this.isMaxInclusive = isMaxInclusive; - } - overlaps(other) { - const range1 = this; // eslint-disable-line @typescript-eslint/no-this-alias - const range2 = other; - if (range1 === undefined || range2 === undefined) { - return false; - } - if (range1.isEmpty() || range2.isEmpty()) { - return false; - } - if (range1.min <= range2.max || range2.min <= range1.max) { - if ((range1.min === range2.max && !(range1.isMinInclusive && range2.isMaxInclusive)) || - (range2.min === range1.max && !(range2.isMinInclusive && range1.isMaxInclusive))) { - return false; - } - return true; - } - return false; - } - isFullRange() { - return (this.min === Constants$1.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey && - this.max === Constants$1.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey && - this.isMinInclusive === true && - this.isMaxInclusive === false); + get url() { + return createDatabaseUri(this.id); } - isEmpty() { - return !(this.isMinInclusive && this.isMaxInclusive) && this.min === this.max; + /** Returns a new {@link Database} instance. + * + * Note: the intention is to get this object from {@link CosmosClient} via `client.database(id)`, not to instantiate it yourself. + */ + constructor(client, id, clientContext) { + this.client = client; + this.id = id; + this.clientContext = clientContext; + this.containers = new Containers(this, this.clientContext); + this.users = new Users(this, this.clientContext); } /** - * Parse a QueryRange from a partitionKeyRange - * @returns QueryRange - * @hidden + * Used to read, replace, or delete a specific, existing {@link Database} by id. + * + * Use `.containers` creating new containers, or querying/reading all containers. + * + * @example Delete a container + * ```typescript + * await client.database("").container("").delete(); + * ``` */ - static parsePartitionKeyRange(partitionKeyRange) { - return new QueryRange(partitionKeyRange[Constants$1.PartitionKeyRange.MinInclusive], partitionKeyRange[Constants$1.PartitionKeyRange.MaxExclusive], true, false); + container(id) { + return new Container(this, id, this.clientContext); } /** - * Parse a QueryRange from a dictionary - * @returns QueryRange - * @hidden + * Used to read, replace, or delete a specific, existing {@link User} by id. + * + * Use `.users` for creating new users, or querying/reading all users. */ - static parseFromDict(queryRangeDict) { - return new QueryRange(queryRangeDict.min, queryRangeDict.max, queryRangeDict.isMinInclusive, queryRangeDict.isMaxInclusive); + user(id) { + return new User(this, id, this.clientContext); + } + /** Read the definition of the given Database. 
*/ + async read(options) { + return withDiagnostics(async (diagnosticNode) => { + return this.readInternal(diagnosticNode, options); + }, this.clientContext); } -} - -/** @hidden */ -class InMemoryCollectionRoutingMap { /** - * Represents a InMemoryCollectionRoutingMap Object, - * Stores partition key ranges in an efficient way with some additional information and provides - * convenience methods for working with set of ranges. + * @hidden */ - constructor(orderedPartitionKeyRanges, orderedPartitionInfo) { - this.orderedPartitionKeyRanges = orderedPartitionKeyRanges; - this.orderedRanges = orderedPartitionKeyRanges.map((pkr) => { - return new QueryRange(pkr[Constants$1.PartitionKeyRange.MinInclusive], pkr[Constants$1.PartitionKeyRange.MaxExclusive], true, false); + async readInternal(diagnosticNode, options) { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.read({ + path, + resourceType: exports.ResourceType.database, + resourceId: id, + options, + diagnosticNode, }); - this.orderedPartitionInfo = orderedPartitionInfo; + return new DatabaseResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); } - getOrderedParitionKeyRanges() { - return this.orderedPartitionKeyRanges; + /** Delete the given Database. */ + async delete(options) { + return withDiagnostics(async (diagnosticNode) => { + const path = getPathFromLink(this.url); + const id = getIdFromLink(this.url); + const response = await this.clientContext.delete({ + path, + resourceType: exports.ResourceType.database, + resourceId: id, + options, + diagnosticNode, + }); + return new DatabaseResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); + }, this.clientContext); } - getOverlappingRanges(providedQueryRanges) { - // TODO This code has all kinds of smells. Multiple iterations and sorts just to grab overlapping ranges - // stfaul attempted to bring it down to one for-loop and failed - const pqr = Array.isArray(providedQueryRanges) - ? providedQueryRanges - : [providedQueryRanges]; - const minToPartitionRange = {}; // TODO: any - // this for loop doesn't invoke any async callback - for (const queryRange of pqr) { - if (queryRange.isEmpty()) { - continue; - } - if (queryRange.isFullRange()) { - return this.orderedPartitionKeyRanges; + /** + * Gets offer on database. If none exists, returns an OfferResponse with undefined. + */ + async readOffer(options = {}) { + return withDiagnostics(async (diagnosticNode) => { + const { resource: record } = await withMetadataDiagnostics(async (node) => { + return this.readInternal(node); + }, diagnosticNode, exports.MetadataLookUpType.DatabaseLookUp); + const path = "/offers"; + const url = record._self; + const response = await this.clientContext.queryFeed({ + path, + resourceId: "", + resourceType: exports.ResourceType.offer, + query: `SELECT * from root where root.resource = "${url}"`, + resultFn: (result) => result.Offers, + options, + diagnosticNode, + }); + const offer = response.result[0] + ? new Offer(this.client, response.result[0].id, this.clientContext) + : undefined; + return new OfferResponse(response.result[0], response.headers, response.code, getEmptyCosmosDiagnostics(), offer); + }, this.clientContext); + } +} + +/** + * Operations for creating new databases, and reading/querying all databases + * + * @see {@link Database} for reading or deleting an existing database; use `client.database(id)`. 
+ * + * Note: all these operations make calls against a fixed budget. + * You should design your system such that these calls scale sublinearly with your application. + * For instance, do not call `databases.readAll()` before every single `item.read()` call, to ensure the database exists; + * do this once on application start up. + */ +class Databases { + /** + * @hidden + * @param client - The parent {@link CosmosClient} for the Database. + */ + constructor(client, clientContext) { + this.client = client; + this.clientContext = clientContext; + } + query(query, options) { + const cb = (diagNode, innerOptions) => { + return this.clientContext.queryFeed({ + path: "/dbs", + resourceType: exports.ResourceType.database, + resourceId: "", + resultFn: (result) => result.Databases, + query, + options: innerOptions, + diagnosticNode: diagNode, + }); + }; + return new QueryIterator(this.clientContext, query, options, cb); + } + /** + * Send a request for creating a database. + * + * A database manages users, permissions and a set of containers. + * Each Azure Cosmos DB Database Account is able to support multiple independent named databases, + * with the database being the logical container for data. + * + * Each Database consists of one or more containers, each of which in turn contain one or more + * documents. Since databases are an administrative resource, the Service Master Key will be + * required in order to access and successfully complete any action using the User APIs. + * + * @param body - The {@link DatabaseDefinition} that represents the {@link Database} to be created. + * @param options - Use to set options like response page size, continuation tokens, etc. + */ + async create(body, options = {}) { + return withDiagnostics(async (diagnosticNode) => { + return this.createInternal(diagnosticNode, body, options); + }, this.clientContext); + } + /** + * @hidden + */ + async createInternal(diagnosticNode, body, options = {}) { + const err = {}; + if (!isResourceValid(body, err)) { + throw err; + } + validateOffer(body); + if (body.maxThroughput) { + const autoscaleParams = { + maxThroughput: body.maxThroughput, + }; + if (body.autoUpgradePolicy) { + autoscaleParams.autoUpgradePolicy = body.autoUpgradePolicy; } - const minIndex = this.orderedRanges.findIndex((range) => { - if (queryRange.min > range.min && queryRange.min < range.max) { - return true; - } - if (queryRange.min === range.min) { - return true; - } - if (queryRange.min === range.max) { - return true; - } + const autoscaleHeaders = JSON.stringify(autoscaleParams); + options.initialHeaders = Object.assign({}, options.initialHeaders, { + [Constants$1.HttpHeaders.AutoscaleSettings]: autoscaleHeaders, }); - if (minIndex < 0) { - throw new Error("error in collection routing map, queried value is less than the start range."); + delete body.maxThroughput; + delete body.autoUpgradePolicy; + } + if (body.throughput) { + options.initialHeaders = Object.assign({}, options.initialHeaders, { + [Constants$1.HttpHeaders.OfferThroughput]: body.throughput, + }); + delete body.throughput; + } + const path = "/dbs"; // TODO: constant + const response = await this.clientContext.create({ + body, + path, + resourceType: exports.ResourceType.database, + resourceId: undefined, + diagnosticNode, + options, + }); + const ref = new Database(this.client, body.id, this.clientContext); + return new DatabaseResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); + } + /** + * Check if a database exists, and if it doesn't, 
create it. + * This will make a read operation based on the id in the `body`, then if it is not found, a create operation. + * + * A database manages users, permissions and a set of containers. + * Each Azure Cosmos DB Database Account is able to support multiple independent named databases, + * with the database being the logical container for data. + * + * Each Database consists of one or more containers, each of which in turn contain one or more + * documents. Since databases are an an administrative resource, the Service Master Key will be + * required in order to access and successfully complete any action using the User APIs. + * + * @param body - The {@link DatabaseDefinition} that represents the {@link Database} to be created. + * @param options - Additional options for the request + */ + async createIfNotExists(body, options) { + if (!body || body.id === null || body.id === undefined) { + throw new Error("body parameter must be an object with an id property"); + } + /* + 1. Attempt to read the Database (based on an assumption that most databases will already exist, so its faster) + 2. If it fails with NotFound error, attempt to create the db. Else, return the read results. + */ + return withDiagnostics(async (diagnosticNode) => { + try { + const readResponse = await this.client + .database(body.id) + .readInternal(diagnosticNode, options); + return readResponse; } - // Start at the end and work backwards - let maxIndex; - for (let i = this.orderedRanges.length - 1; i >= 0; i--) { - const range = this.orderedRanges[i]; - if (queryRange.max > range.min && queryRange.max < range.max) { - maxIndex = i; - break; - } - if (queryRange.max === range.min) { - maxIndex = i; - break; - } - if (queryRange.max === range.max) { - maxIndex = i; - break; + catch (err) { + if (err.code === StatusCodes.NotFound) { + const createResponse = await this.createInternal(diagnosticNode, body, options); + // Must merge the headers to capture RU costskaty + mergeHeaders(createResponse.headers, err.headers); + return createResponse; } - } - if (maxIndex > this.orderedRanges.length) { - throw new Error("error in collection routing map, queried value is greater than the end range."); - } - for (let j = minIndex; j < maxIndex + 1; j++) { - if (queryRange.overlaps(this.orderedRanges[j])) { - minToPartitionRange[this.orderedPartitionKeyRanges[j][Constants$1.PartitionKeyRange.MinInclusive]] = this.orderedPartitionKeyRanges[j]; + else { + throw err; } } - } - const overlappingPartitionKeyRanges = Object.keys(minToPartitionRange).map((k) => minToPartitionRange[k]); - return overlappingPartitionKeyRanges.sort((a, b) => { - return a[Constants$1.PartitionKeyRange.MinInclusive].localeCompare(b[Constants$1.PartitionKeyRange.MinInclusive]); - }); + }, this.clientContext); + } + // TODO: DatabaseResponse for QueryIterator? + /** + * Reads all databases. + * @param options - Use to set options like response page size, continuation tokens, etc. + * @returns {@link QueryIterator} Allows you to return all databases in an array or iterate over them one at a time. + * @example Read all databases to array. + * ```typescript + * const {body: databaseList} = await client.databases.readAll().fetchAll(); + * ``` + */ + readAll(options) { + return this.query(undefined, options); } } -// Copyright (c) Microsoft Corporation. /** + * Used to specify which type of events to execute this plug in on. 
+ * * @hidden */ -function compareRanges(a, b) { - const aVal = a[0][Constants$1.PartitionKeyRange.MinInclusive]; - const bVal = b[0][Constants$1.PartitionKeyRange.MinInclusive]; - if (aVal > bVal) { - return 1; +exports.PluginOn = void 0; +(function (PluginOn) { + /** + * Will be executed per network request + */ + PluginOn["request"] = "request"; + /** + * Will be executed per API operation + */ + PluginOn["operation"] = "operation"; +})(exports.PluginOn || (exports.PluginOn = {})); +/** + * @internal + */ +async function executePlugins(diagnosticNode, requestContext, next, on) { + if (!requestContext.plugins) { + return next(requestContext, diagnosticNode, undefined); } - if (aVal < bVal) { - return -1; + let level = 0; + const _ = (inner) => { + if (++level >= inner.plugins.length) { + return next(requestContext, diagnosticNode, undefined); + } + else if (inner.plugins[level].on !== on) { + return _(requestContext); + } + else { + return inner.plugins[level].plugin(inner, diagnosticNode, _); + } + }; + if (requestContext.plugins[level].on !== on) { + return _(requestContext); + } + else { + return requestContext.plugins[level].plugin(requestContext, diagnosticNode, _); } - return 0; } -/** @hidden */ -function createCompleteRoutingMap(partitionKeyRangeInfoTuppleList) { - const rangeById = {}; // TODO: any - const rangeByInfo = {}; // TODO: any - let sortedRanges = []; - // the for loop doesn't invoke any async callback - for (const r of partitionKeyRangeInfoTuppleList) { - rangeById[r[0][Constants$1.PartitionKeyRange.Id]] = r; - rangeByInfo[r[1]] = r[0]; - sortedRanges.push(r); + +/** + * @hidden + */ +// Windows Socket Error Codes +const WindowsInterruptedFunctionCall = 10004; +/** + * @hidden + */ +const WindowsFileHandleNotValid = 10009; +/** + * @hidden + */ +const WindowsPermissionDenied = 10013; +/** + * @hidden + */ +const WindowsBadAddress = 10014; +/** + * @hidden + */ +const WindowsInvalidArgumnet = 10022; +/** + * @hidden + */ +const WindowsResourceTemporarilyUnavailable = 10035; +/** + * @hidden + */ +const WindowsOperationNowInProgress = 10036; +/** + * @hidden + */ +const WindowsAddressAlreadyInUse = 10048; +/** + * @hidden + */ +const WindowsConnectionResetByPeer = 10054; +/** + * @hidden + */ +const WindowsCannotSendAfterSocketShutdown = 10058; +/** + * @hidden + */ +const WindowsConnectionTimedOut = 10060; +/** + * @hidden + */ +const WindowsConnectionRefused = 10061; +/** + * @hidden + */ +const WindowsNameTooLong = 10063; +/** + * @hidden + */ +const WindowsHostIsDown = 10064; +/** + * @hidden + */ +const WindowsNoRouteTohost = 10065; +/** + * @hidden + */ +// Linux Error Codes +/** + * @hidden + */ +const LinuxConnectionReset = "ECONNRESET"; +// Node Error Codes +/** + * @hidden + */ +const BrokenPipe = "EPIPE"; +/** + * @hidden + */ +const CONNECTION_ERROR_CODES = [ + WindowsInterruptedFunctionCall, + WindowsFileHandleNotValid, + WindowsPermissionDenied, + WindowsBadAddress, + WindowsInvalidArgumnet, + WindowsResourceTemporarilyUnavailable, + WindowsOperationNowInProgress, + WindowsAddressAlreadyInUse, + WindowsConnectionResetByPeer, + WindowsCannotSendAfterSocketShutdown, + WindowsConnectionTimedOut, + WindowsConnectionRefused, + WindowsNameTooLong, + WindowsHostIsDown, + WindowsNoRouteTohost, + LinuxConnectionReset, + TimeoutErrorCode, + BrokenPipe, +]; +/** + * @hidden + */ +function needsRetry(operationType, code) { + if ((operationType === exports.OperationType.Read || operationType === exports.OperationType.Query) && + CONNECTION_ERROR_CODES.indexOf(code) !== 
-1) { + return true; } - sortedRanges = sortedRanges.sort(compareRanges); - const partitionKeyOrderedRange = sortedRanges.map((r) => r[0]); - const orderedPartitionInfo = sortedRanges.map((r) => r[1]); - if (!isCompleteSetOfRange(partitionKeyOrderedRange)) { - return undefined; + else { + return false; } - return new InMemoryCollectionRoutingMap(partitionKeyOrderedRange, orderedPartitionInfo); } /** + * This class implements the default connection retry policy for requests. * @hidden */ -function isCompleteSetOfRange(partitionKeyOrderedRange) { - // TODO: any - let isComplete = false; - if (partitionKeyOrderedRange.length > 0) { - const firstRange = partitionKeyOrderedRange[0]; - const lastRange = partitionKeyOrderedRange[partitionKeyOrderedRange.length - 1]; - isComplete = - firstRange[Constants$1.PartitionKeyRange.MinInclusive] === - Constants$1.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey; - isComplete = - isComplete && - lastRange[Constants$1.PartitionKeyRange.MaxExclusive] === - Constants$1.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey; - for (let i = 1; i < partitionKeyOrderedRange.length; i++) { - const previousRange = partitionKeyOrderedRange[i - 1]; - const currentRange = partitionKeyOrderedRange[i]; - isComplete = - isComplete && - previousRange[Constants$1.PartitionKeyRange.MaxExclusive] === - currentRange[Constants$1.PartitionKeyRange.MinInclusive]; - if (!isComplete) { - if (previousRange[Constants$1.PartitionKeyRange.MaxExclusive] > - currentRange[Constants$1.PartitionKeyRange.MinInclusive]) { - throw Error("Ranges overlap"); - } - break; +class DefaultRetryPolicy { + constructor(operationType) { + this.operationType = operationType; + this.maxTries = 10; + this.currentRetryAttemptCount = 0; + this.retryAfterInMs = 1000; + } + /** + * Determines whether the request should be retried or not. + * @param err - Error returned by the request. + */ + async shouldRetry(err, diagnosticNode) { + if (err) { + if (this.currentRetryAttemptCount < this.maxTries && + needsRetry(this.operationType, err.code)) { + diagnosticNode.addData({ successfulRetryPolicy: "default" }); + this.currentRetryAttemptCount++; + return true; } } + return false; } - return isComplete; } -// Copyright (c) Microsoft Corporation. -/** @hidden */ -class PartitionKeyRangeCache { - constructor(clientContext) { - this.clientContext = clientContext; - this.collectionRoutingMapByCollectionId = {}; +/** + * This class implements the retry policy for endpoint discovery. + * @hidden + */ +class EndpointDiscoveryRetryPolicy { + /** + * @param globalEndpointManager - The GlobalEndpointManager instance. + */ + constructor(globalEndpointManager, operationType) { + this.globalEndpointManager = globalEndpointManager; + this.operationType = operationType; + this.maxTries = EndpointDiscoveryRetryPolicy.maxTries; + this.currentRetryAttemptCount = 0; + this.retryAfterInMs = EndpointDiscoveryRetryPolicy.retryAfterInMs; } /** - * Finds or Instantiates the requested Collection Routing Map - * @param collectionLink - Requested collectionLink - * @hidden + * Determines whether the request should be retried or not. + * @param err - Error returned by the request. 
*/ - async onCollectionRoutingMap(collectionLink, diagnosticNode, forceRefresh = false) { - const collectionId = getIdFromLink(collectionLink); - if (this.collectionRoutingMapByCollectionId[collectionId] === undefined || forceRefresh) { - this.collectionRoutingMapByCollectionId[collectionId] = this.requestCollectionRoutingMap(collectionLink, diagnosticNode); + async shouldRetry(err, diagnosticNode, retryContext, locationEndpoint) { + if (!err) { + return false; } - return this.collectionRoutingMapByCollectionId[collectionId]; + if (!retryContext || !locationEndpoint) { + return false; + } + if (!this.globalEndpointManager.enableEndpointDiscovery) { + return false; + } + if (this.currentRetryAttemptCount >= this.maxTries) { + return false; + } + this.currentRetryAttemptCount++; + if (isReadRequest(this.operationType)) { + await this.globalEndpointManager.markCurrentLocationUnavailableForRead(diagnosticNode, locationEndpoint); + } + else { + await this.globalEndpointManager.markCurrentLocationUnavailableForWrite(diagnosticNode, locationEndpoint); + } + retryContext.retryCount = this.currentRetryAttemptCount; + retryContext.clearSessionTokenNotAvailable = false; + retryContext.retryRequestOnPreferredLocations = false; + diagnosticNode.addData({ successfulRetryPolicy: "endpointDiscovery" }); + return true; } +} +EndpointDiscoveryRetryPolicy.maxTries = 120; // TODO: Constant? +EndpointDiscoveryRetryPolicy.retryAfterInMs = 1000; + +/** + * This class implements the resource throttle retry policy for requests. + * @hidden + */ +class ResourceThrottleRetryPolicy { /** - * Given the query ranges and a collection, invokes the callback on the list of overlapping partition key ranges - * @hidden + * @param maxTries - Max number of retries to be performed for a request. + * @param fixedRetryIntervalInMs - Fixed retry interval in milliseconds to wait between each + * retry ignoring the retryAfter returned as part of the response. + * @param timeoutInSeconds - Max wait time in seconds to wait for a request while the + * retries are happening. */ - async getOverlappingRanges(collectionLink, queryRange, diagnosticNode, forceRefresh = false) { - const crm = await this.onCollectionRoutingMap(collectionLink, diagnosticNode, forceRefresh); - return crm.getOverlappingRanges(queryRange); + constructor(maxTries = 9, fixedRetryIntervalInMs = 0, timeoutInSeconds = 30) { + this.maxTries = maxTries; + this.fixedRetryIntervalInMs = fixedRetryIntervalInMs; + /** Current retry attempt count. */ + this.currentRetryAttemptCount = 0; + /** Cummulative wait time in milliseconds for a request while the retries are happening. */ + this.cummulativeWaitTimeinMs = 0; + /** Retry interval in milliseconds to wait before the next request will be sent. */ + this.retryAfterInMs = 0; + this.timeoutInMs = timeoutInSeconds * 1000; + this.currentRetryAttemptCount = 0; + this.cummulativeWaitTimeinMs = 0; } - async requestCollectionRoutingMap(collectionLink, diagnosticNode) { - const { resources } = await withMetadataDiagnostics(async (metadataDiagnostics) => { - return this.clientContext - .queryPartitionKeyRanges(collectionLink) - .fetchAllInternal(metadataDiagnostics); - }, diagnosticNode, exports.MetadataLookUpType.PartitionKeyRangeLookUp); - return createCompleteRoutingMap(resources.map((r) => [r, true])); + /** + * Determines whether the request should be retried or not. + * @param err - Error returned by the request. 
+ */ + async shouldRetry(err, diagnosticNode) { + // TODO: any custom error object + if (err) { + if (this.currentRetryAttemptCount < this.maxTries) { + this.currentRetryAttemptCount++; + this.retryAfterInMs = 0; + if (this.fixedRetryIntervalInMs) { + this.retryAfterInMs = this.fixedRetryIntervalInMs; + } + else if (err.retryAfterInMs) { + this.retryAfterInMs = err.retryAfterInMs; + } + if (this.cummulativeWaitTimeinMs < this.timeoutInMs) { + this.cummulativeWaitTimeinMs += this.retryAfterInMs; + diagnosticNode.addData({ successfulRetryPolicy: "resourceThrottle" }); + return true; + } + } + } + return false; } } -/** @hidden */ -const PARITIONKEYRANGE = Constants$1.PartitionKeyRange; -/** @hidden */ -class SmartRoutingMapProvider { - constructor(clientContext) { - this.partitionKeyRangeCache = new PartitionKeyRangeCache(clientContext); +/** + * This class implements the retry policy for session consistent reads. + * @hidden + */ +class SessionRetryPolicy { + /** + * @param globalEndpointManager - The GlobalEndpointManager instance. + */ + constructor(globalEndpointManager, resourceType, operationType, connectionPolicy) { + this.globalEndpointManager = globalEndpointManager; + this.resourceType = resourceType; + this.operationType = operationType; + this.connectionPolicy = connectionPolicy; + /** Current retry attempt count. */ + this.currentRetryAttemptCount = 0; + /** Retry interval in milliseconds. */ + this.retryAfterInMs = 0; } - static _secondRangeIsAfterFirstRange(range1, range2) { - if (typeof range1.max === "undefined") { - throw new Error("range1 must have max"); + /** + * Determines whether the request should be retried or not. + * @param err - Error returned by the request. + * @param callback - The callback function which takes bool argument which specifies whether the request + * will be retried or not. + */ + async shouldRetry(err, diagnosticNode, retryContext) { + if (!err) { + return false; } - if (typeof range2.min === "undefined") { - throw new Error("range2 must have min"); + if (!retryContext) { + return false; } - if (range1.max > range2.min) { - // r.min < #previous_r.max + if (!this.connectionPolicy.enableEndpointDiscovery) { return false; } - else { - if (range1.max === range2.min && range1.isMaxInclusive && range2.isMinInclusive) { - // the inclusive ending endpoint of previous_r is the same as the inclusive beginning endpoint of r - // they share a point + if (this.globalEndpointManager.canUseMultipleWriteLocations(this.resourceType, this.operationType)) { + // If we can write to multiple locations, we should against every write endpoint until we succeed + const endpoints = isReadRequest(this.operationType) + ? 
await this.globalEndpointManager.getReadEndpoints() + : await this.globalEndpointManager.getWriteEndpoints(); + if (this.currentRetryAttemptCount > endpoints.length) { return false; } - return true; + else { + this.currentRetryAttemptCount++; + retryContext.retryCount++; + retryContext.retryRequestOnPreferredLocations = this.currentRetryAttemptCount > 1; + retryContext.clearSessionTokenNotAvailable = + this.currentRetryAttemptCount === endpoints.length; + diagnosticNode.addData({ successfulRetryPolicy: "session" }); + return true; + } } - } - static _isSortedAndNonOverlapping(ranges) { - for (let idx = 1; idx < ranges.length; idx++) { - const previousR = ranges[idx - 1]; - const r = ranges[idx]; - if (!this._secondRangeIsAfterFirstRange(previousR, r)) { + else { + if (this.currentRetryAttemptCount > 1) { return false; } + else { + this.currentRetryAttemptCount++; + retryContext.retryCount++; + retryContext.retryRequestOnPreferredLocations = false; // Forces all operations to primary write endpoint + retryContext.clearSessionTokenNotAvailable = true; + diagnosticNode.addData({ successfulRetryPolicy: "session" }); + return true; + } } - return true; } - static _stringMax(a, b) { - return a >= b ? a : b; +} + +/** + * This class TimeoutFailoverRetryPolicy handles retries for read operations + * (including data plane,metadata, and query plan) in case of request timeouts + * (TimeoutError) or service unavailability (503 status code) by performing failover + * and retrying on other regions. + * @hidden + */ +class TimeoutFailoverRetryPolicy { + constructor(globalEndpointManager, headers, methodType, resourceType, operationType, enableEndPointDiscovery) { + this.globalEndpointManager = globalEndpointManager; + this.headers = headers; + this.methodType = methodType; + this.resourceType = resourceType; + this.operationType = operationType; + this.enableEndPointDiscovery = enableEndPointDiscovery; + this.maxRetryAttemptCount = 120; + this.maxServiceUnavailableRetryCount = 1; + this.retryAfterInMs = 0; + this.failoverRetryCount = 0; } - static _stringCompare(a, b) { - return a === b ? 0 : a > b ? 1 : -1; + /** + * Checks if a timeout request is valid for the timeout failover retry policy. + * A valid request should be a data plane, metadata, or query plan request. + * @returns + */ + isValidRequestForTimeoutError() { + const isQuery = Constants$1.HttpHeaders.IsQuery in this.headers; + const isQueryPlan = Constants$1.HttpHeaders.IsQueryPlan in this.headers; + if (this.methodType === exports.HTTPMethod.get || isQuery || isQueryPlan) { + return true; + } + return false; } - static _subtractRange(r, partitionKeyRange) { - const left = this._stringMax(partitionKeyRange[PARITIONKEYRANGE.MaxExclusive], r.min); - const leftInclusive = this._stringCompare(left, r.min) === 0 ? 
r.isMinInclusive : false; - return new QueryRange(left, r.max, leftInclusive, r.isMaxInclusive); + async shouldRetry(err, diagnosticNode, retryContext, locationEndpoint) { + if (!err) { + return false; + } + if (!retryContext || !locationEndpoint) { + return false; + } + // Check if the error is a timeout error (TimeoutErrorCode) and if it is not a valid HTTP network timeout request + if (err.code === TimeoutErrorCode && !this.isValidRequestForTimeoutError()) { + return false; + } + if (!this.enableEndPointDiscovery) { + return false; + } + if (err.code === StatusCodes.ServiceUnavailable && + this.failoverRetryCount >= this.maxServiceUnavailableRetryCount) { + return false; + } + if (this.failoverRetryCount >= this.maxRetryAttemptCount) { + return false; + } + const canUseMultipleWriteLocations = this.globalEndpointManager.canUseMultipleWriteLocations(this.resourceType, this.operationType); + const readRequest = isReadRequest(this.operationType); + if (!canUseMultipleWriteLocations && !readRequest) { + // Write requests on single master cannot be retried, no other regions available + return false; + } + this.failoverRetryCount++; + // Setting the retryLocationIndex to the next available location for retry. + // The retryLocationIndex is determined based on the failoverRetryCount, starting from zero. + retryContext.retryLocationServerIndex = await this.findEndpointIndex(this.failoverRetryCount); + diagnosticNode.addData({ successfulRetryPolicy: "timeout-failover" }); + return true; } /** - * Given the sorted ranges and a collection, invokes the callback on the list of overlapping partition key ranges - * @param callback - Function execute on the overlapping partition key ranges result, - * takes two parameters error, partition key ranges - * @hidden + * Determines index of endpoint to be used for retry based upon failoverRetryCount and avalable locations + * @param failoverRetryCount - count of failovers + * @returns */ - async getOverlappingRanges(collectionLink, sortedRanges, diagnosticNode) { - // validate if the list is non- overlapping and sorted TODO: any PartitionKeyRanges - if (!SmartRoutingMapProvider._isSortedAndNonOverlapping(sortedRanges)) { - throw new Error("the list of ranges is not a non-overlapping sorted ranges"); - } - let partitionKeyRanges = []; // TODO: any ParitionKeyRanges - if (sortedRanges.length === 0) { - return partitionKeyRanges; + async findEndpointIndex(failoverRetryCount) { + // count of preferred locations specified by user + const preferredLocationsCount = this.globalEndpointManager.preferredLocationsCount; + const readRequest = isReadRequest(this.operationType); + let endpointIndex = 0; + // If preferredLocationsCount is not zero, it indicates that the user has specified preferred locations. + if (preferredLocationsCount !== 0) { + // The endpointIndex is set based on the preferred location and the failover retry count. + endpointIndex = failoverRetryCount % preferredLocationsCount; } - const collectionRoutingMap = await this.partitionKeyRangeCache.onCollectionRoutingMap(collectionLink, diagnosticNode); - let index = 0; - let currentProvidedRange = sortedRanges[index]; - for (;;) { - if (currentProvidedRange.isEmpty()) { - // skip and go to the next item - if (++index >= sortedRanges.length) { - return partitionKeyRanges; + else { + // In the absence of preferred locations, the endpoint selection is based on the failover count and the number of available locations. 
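+                // Editor note (illustrative, not part of the SDK source): the modulo arithmetic below simply
+                // walks the available endpoints in order; e.g. with three read endpoints and no preferred
+                // locations, failoverRetryCount values 1, 2, 3, 4 select endpoint indices 1, 2, 0, 1.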
+ if (readRequest) { + const getReadEndpoints = await this.globalEndpointManager.getReadEndpoints(); + if (getReadEndpoints && getReadEndpoints.length > 0) { + endpointIndex = failoverRetryCount % getReadEndpoints.length; } - currentProvidedRange = sortedRanges[index]; - continue; - } - let queryRange; - if (partitionKeyRanges.length > 0) { - queryRange = SmartRoutingMapProvider._subtractRange(currentProvidedRange, partitionKeyRanges[partitionKeyRanges.length - 1]); } else { - queryRange = currentProvidedRange; + const getWriteEndpoints = await this.globalEndpointManager.getWriteEndpoints(); + if (getWriteEndpoints && getWriteEndpoints.length > 0) { + endpointIndex = failoverRetryCount % getWriteEndpoints.length; + } } - const overlappingRanges = collectionRoutingMap.getOverlappingRanges(queryRange); - if (overlappingRanges.length <= 0) { - throw new Error(`error: returned overlapping ranges for queryRange ${queryRange} is empty`); + } + return endpointIndex; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * @hidden + */ +async function execute({ diagnosticNode, retryContext = { retryCount: 0 }, retryPolicies, requestContext, executeRequest, }) { + // TODO: any response + return addDignosticChild(async (localDiagnosticNode) => { + localDiagnosticNode.addData({ requestAttempNumber: retryContext.retryCount }); + if (!retryPolicies) { + retryPolicies = { + endpointDiscoveryRetryPolicy: new EndpointDiscoveryRetryPolicy(requestContext.globalEndpointManager, requestContext.operationType), + resourceThrottleRetryPolicy: new ResourceThrottleRetryPolicy(requestContext.connectionPolicy.retryOptions.maxRetryAttemptCount, requestContext.connectionPolicy.retryOptions.fixedRetryIntervalInMilliseconds, requestContext.connectionPolicy.retryOptions.maxWaitTimeInSeconds), + sessionReadRetryPolicy: new SessionRetryPolicy(requestContext.globalEndpointManager, requestContext.resourceType, requestContext.operationType, requestContext.connectionPolicy), + defaultRetryPolicy: new DefaultRetryPolicy(requestContext.operationType), + timeoutFailoverRetryPolicy: new TimeoutFailoverRetryPolicy(requestContext.globalEndpointManager, requestContext.headers, requestContext.method, requestContext.resourceType, requestContext.operationType, requestContext.connectionPolicy.enableEndpointDiscovery), + }; + } + if (retryContext && retryContext.clearSessionTokenNotAvailable) { + requestContext.client.clearSessionToken(requestContext.path); + delete requestContext.headers["x-ms-session-token"]; + } + if (retryContext && retryContext.retryLocationServerIndex) { + requestContext.endpoint = await requestContext.globalEndpointManager.resolveServiceEndpoint(localDiagnosticNode, requestContext.resourceType, requestContext.operationType, retryContext.retryLocationServerIndex); + } + else { + requestContext.endpoint = await requestContext.globalEndpointManager.resolveServiceEndpoint(localDiagnosticNode, requestContext.resourceType, requestContext.operationType); + } + const startTimeUTCInMs = getCurrentTimestampInMs(); + try { + const response = await executeRequest(localDiagnosticNode, requestContext); + response.headers[Constants$1.ThrottleRetryCount] = + retryPolicies.resourceThrottleRetryPolicy.currentRetryAttemptCount; + response.headers[Constants$1.ThrottleRetryWaitTimeInMs] = + retryPolicies.resourceThrottleRetryPolicy.cummulativeWaitTimeinMs; + return response; + } + catch (err) { + // TODO: any error + let retryPolicy = null; + const headers = err.headers || {}; + if (err.code === StatusCodes.ENOTFOUND || + err.code 
=== "REQUEST_SEND_ERROR" || + (err.code === StatusCodes.Forbidden && + (err.substatus === SubStatusCodes.DatabaseAccountNotFound || + err.substatus === SubStatusCodes.WriteForbidden))) { + retryPolicy = retryPolicies.endpointDiscoveryRetryPolicy; } - partitionKeyRanges = partitionKeyRanges.concat(overlappingRanges); - const lastKnownTargetRange = QueryRange.parsePartitionKeyRange(partitionKeyRanges[partitionKeyRanges.length - 1]); - if (!lastKnownTargetRange) { - throw new Error("expected lastKnowTargetRange to be truthy"); + else if (err.code === StatusCodes.TooManyRequests) { + retryPolicy = retryPolicies.resourceThrottleRetryPolicy; } - // the overlapping ranges must contain the requested range - if (SmartRoutingMapProvider._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) > - 0) { - throw new Error(`error: returned overlapping ranges ${overlappingRanges} \ - does not contain the requested range ${queryRange}`); + else if (err.code === StatusCodes.NotFound && + err.substatus === SubStatusCodes.ReadSessionNotAvailable) { + retryPolicy = retryPolicies.sessionReadRetryPolicy; } - // the current range is contained in partitionKeyRanges just move forward - if (++index >= sortedRanges.length) { - return partitionKeyRanges; + else if (err.code === StatusCodes.ServiceUnavailable || err.code === TimeoutErrorCode) { + retryPolicy = retryPolicies.timeoutFailoverRetryPolicy; } - currentProvidedRange = sortedRanges[index]; - while (SmartRoutingMapProvider._stringCompare(currentProvidedRange.max, lastKnownTargetRange.max) <= 0) { - // the current range is covered too.just move forward - if (++index >= sortedRanges.length) { - return partitionKeyRanges; + else { + retryPolicy = retryPolicies.defaultRetryPolicy; + } + const results = await retryPolicy.shouldRetry(err, localDiagnosticNode, retryContext, requestContext.endpoint); + if (!results) { + headers[Constants$1.ThrottleRetryCount] = + retryPolicies.resourceThrottleRetryPolicy.currentRetryAttemptCount; + headers[Constants$1.ThrottleRetryWaitTimeInMs] = + retryPolicies.resourceThrottleRetryPolicy.cummulativeWaitTimeinMs; + err.headers = Object.assign(Object.assign({}, err.headers), headers); + throw err; + } + else { + requestContext.retryCount++; + const newUrl = results[1]; // TODO: any hack + if (newUrl !== undefined) { + requestContext.endpoint = newUrl; } - currentProvidedRange = sortedRanges[index]; + localDiagnosticNode.recordFailedNetworkCall(startTimeUTCInMs, requestContext, retryContext.retryCount, err.code, err.subsstatusCode, headers); + await sleep(retryPolicy.retryAfterInMs); + return execute({ + diagnosticNode, + executeRequest, + requestContext, + retryContext, + retryPolicies, + }); } } + }, diagnosticNode, exports.DiagnosticNodeType.HTTP_REQUEST); +} + +/** + * @hidden + */ +let defaultHttpsAgent; +const https = __nccwpck_require__(95687); // eslint-disable-line @typescript-eslint/no-require-imports +const tls = __nccwpck_require__(24404); // eslint-disable-line @typescript-eslint/no-require-imports +// minVersion only available in Node 10+ +if (tls.DEFAULT_MIN_VERSION) { + defaultHttpsAgent = new https.Agent({ + keepAlive: true, + minVersion: "TLSv1.2", + }); +} +else { + // Remove when Node 8 support has been dropped + defaultHttpsAgent = new https.Agent({ + keepAlive: true, + secureProtocol: "TLSv1_2_method", + }); +} +const http = __nccwpck_require__(13685); // eslint-disable-line @typescript-eslint/no-require-imports +/** + * @internal + */ +const defaultHttpAgent = new http.Agent({ + keepAlive: true, +}); + 
+// Copyright (c) Microsoft Corporation. +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = coreRestPipeline.createDefaultHttpClient(); } + return cachedHttpClient; } // Copyright (c) Microsoft Corporation. -/** @hidden */ -const logger$2 = logger$5.createClientLogger("parallelQueryExecutionContextBase"); -/** @hidden */ -var ParallelQueryExecutionContextBaseStates; -(function (ParallelQueryExecutionContextBaseStates) { - ParallelQueryExecutionContextBaseStates["started"] = "started"; - ParallelQueryExecutionContextBaseStates["inProgress"] = "inProgress"; - ParallelQueryExecutionContextBaseStates["ended"] = "ended"; -})(ParallelQueryExecutionContextBaseStates || (ParallelQueryExecutionContextBaseStates = {})); -/** @hidden */ -class ParallelQueryExecutionContextBase { - /** - * Provides the ParallelQueryExecutionContextBase. - * This is the base class that ParallelQueryExecutionContext and OrderByQueryExecutionContext will derive from. - * - * When handling a parallelized query, it instantiates one instance of - * DocumentProcuder per target partition key range and aggregates the result of each. - * - * @param clientContext - The service endpoint to use to create the client. - * @param collectionLink - The Collection Link - * @param options - Represents the feed options. - * @param partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo - * @hidden - */ - constructor(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo) { - this.clientContext = clientContext; - this.collectionLink = collectionLink; - this.query = query; - this.options = options; - this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo; - this.clientContext = clientContext; - this.collectionLink = collectionLink; - this.query = query; - this.options = options; - this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo; - this.diagnosticNodeWrapper = { - consumed: false, - diagnosticNode: new DiagnosticNodeInternal(clientContext.diagnosticLevel, exports.DiagnosticNodeType.PARALLEL_QUERY_NODE, null), - }; - this.diagnosticNodeWrapper.diagnosticNode.addData({ stateful: true }); - this.err = undefined; - this.state = ParallelQueryExecutionContextBase.STATES.started; - this.routingProvider = new SmartRoutingMapProvider(this.clientContext); - this.sortOrders = this.partitionedQueryExecutionInfo.queryInfo.orderBy; - this.requestContinuation = options ? options.continuationToken || options.continuation : null; - // response headers of undergoing operation - this.respHeaders = getInitialHeader(); - // Make priority queue for documentProducers - // The comparator is supplied by the derived class - this.orderByPQ = new PriorityQueue__default["default"]((a, b) => this.documentProducerComparator(b, a)); - // Creating the documentProducers - this.sem = semaphore__default["default"](1); - // Creating callback for semaphore - // TODO: Code smell - const createDocumentProducersAndFillUpPriorityQueueFunc = async () => { - // ensure the lock is released after finishing up - try { - const targetPartitionRanges = await this._onTargetPartitionRanges(); - this.waitingForInternalExecutionContexts = targetPartitionRanges.length; - const maxDegreeOfParallelism = options.maxDegreeOfParallelism === undefined || options.maxDegreeOfParallelism < 1 - ? 
targetPartitionRanges.length - : Math.min(options.maxDegreeOfParallelism, targetPartitionRanges.length); - logger$2.info("Query starting against " + - targetPartitionRanges.length + - " ranges with parallelism of " + - maxDegreeOfParallelism); - const parallelismSem = semaphore__default["default"](maxDegreeOfParallelism); - let filteredPartitionKeyRanges = []; - // The document producers generated from filteredPartitionKeyRanges - const targetPartitionQueryExecutionContextList = []; - if (this.requestContinuation) { - throw new Error("Continuation tokens are not yet supported for cross partition queries"); - } - else { - filteredPartitionKeyRanges = targetPartitionRanges; - } - // Create one documentProducer for each partitionTargetRange - filteredPartitionKeyRanges.forEach((partitionTargetRange) => { - // TODO: any partitionTargetRange - // no async callback - targetPartitionQueryExecutionContextList.push(this._createTargetPartitionQueryExecutionContext(partitionTargetRange)); - }); - // Fill up our priority queue with documentProducers - targetPartitionQueryExecutionContextList.forEach((documentProducer) => { - // has async callback - const throttledFunc = async () => { - try { - const { result: document, headers } = await documentProducer.current(this.getDiagnosticNode()); - this._mergeWithActiveResponseHeaders(headers); - if (document === undefined) { - // no results on this one - return; - } - // if there are matching results in the target ex range add it to the priority queue - try { - this.orderByPQ.enq(documentProducer); - } - catch (e) { - this.err = e; - } - } - catch (err) { - this._mergeWithActiveResponseHeaders(err.headers); - this.err = err; - } - finally { - parallelismSem.leave(); - this._decrementInitiationLock(); - } - }; - parallelismSem.take(throttledFunc); - }); - } - catch (err) { - this.err = err; - // release the lock - this.sem.leave(); - return; - } - }; - this.sem.take(createDocumentProducersAndFillUpPriorityQueueFunc); - } - _decrementInitiationLock() { - // decrements waitingForInternalExecutionContexts - // if waitingForInternalExecutionContexts reaches 0 releases the semaphore and changes the state - this.waitingForInternalExecutionContexts = this.waitingForInternalExecutionContexts - 1; - if (this.waitingForInternalExecutionContexts === 0) { - this.sem.leave(); - if (this.orderByPQ.size() === 0) { - this.state = ParallelQueryExecutionContextBase.STATES.inProgress; - } +const logger$1 = logger$5.createClientLogger("RequestHandler"); +async function executeRequest(diagnosticNode, requestContext) { + return executePlugins(diagnosticNode, requestContext, httpRequest, exports.PluginOn.request); +} +/** + * @hidden + */ +async function httpRequest(requestContext, diagnosticNode) { + const controller = new nodeAbortController.AbortController(); + const signal = controller.signal; + // Wrap users passed abort events and call our own internal abort() + const userSignal = requestContext.options && requestContext.options.abortSignal; + if (userSignal) { + if (userSignal.aborted) { + controller.abort(); + } + else { + userSignal.addEventListener("abort", () => { + controller.abort(); + }); } } - _mergeWithActiveResponseHeaders(headers) { - mergeHeaders(this.respHeaders, headers); - } - _getAndResetActiveResponseHeaders() { - const ret = this.respHeaders; - this.respHeaders = getInitialHeader(); - return ret; - } - getDiagnosticNode() { - return this.diagnosticNodeWrapper.diagnosticNode; + const timeout = setTimeout(() => { + controller.abort(); + }, 
requestContext.connectionPolicy.requestTimeout); + let response; + if (requestContext.body) { + requestContext.body = bodyFromData(requestContext.body); } - async _onTargetPartitionRanges() { - // invokes the callback when the target partition ranges are ready - const parsedRanges = this.partitionedQueryExecutionInfo.queryRanges; - const queryRanges = parsedRanges.map((item) => QueryRange.parseFromDict(item)); - return this.routingProvider.getOverlappingRanges(this.collectionLink, queryRanges, this.getDiagnosticNode()); + const httpsClient = getCachedDefaultHttpClient(); + const url = prepareURL(requestContext.endpoint, requestContext.path); + const reqHeaders = coreRestPipeline.createHttpHeaders(requestContext.headers); + const pipelineRequest = coreRestPipeline.createPipelineRequest({ + url, + headers: reqHeaders, + method: requestContext.method, + abortSignal: signal, + body: requestContext.body, + }); + if (requestContext.requestAgent) { + pipelineRequest.agent = requestContext.requestAgent; } - /** - * Gets the replacement ranges for a partitionkeyrange that has been split - */ - async _getReplacementPartitionKeyRanges(documentProducer) { - const partitionKeyRange = documentProducer.targetPartitionKeyRange; - // Download the new routing map - this.routingProvider = new SmartRoutingMapProvider(this.clientContext); - // Get the queryRange that relates to this partitionKeyRange - const queryRange = QueryRange.parsePartitionKeyRange(partitionKeyRange); - return this.routingProvider.getOverlappingRanges(this.collectionLink, [queryRange], this.getDiagnosticNode()); + else { + const parsedUrl = new URL(url); + pipelineRequest.agent = parsedUrl.protocol === "http" ? defaultHttpAgent : defaultHttpsAgent; } - // TODO: P0 Code smell - can barely tell what this is doing - /** - * Removes the current document producer from the priqueue, - * replaces that document producer with child document producers, - * then reexecutes the originFunction with the corrrected executionContext - */ - async _repairExecutionContext(diagnosticNode, originFunction) { - // TODO: any - // Get the replacement ranges - // Removing the invalid documentProducer from the orderByPQ - const parentDocumentProducer = this.orderByPQ.deq(); - try { - const replacementPartitionKeyRanges = await this._getReplacementPartitionKeyRanges(parentDocumentProducer); - const replacementDocumentProducers = []; - // Create the replacement documentProducers - replacementPartitionKeyRanges.forEach((partitionKeyRange) => { - // Create replacment document producers with the parent's continuationToken - const replacementDocumentProducer = this._createTargetPartitionQueryExecutionContext(partitionKeyRange, parentDocumentProducer.continuationToken); - replacementDocumentProducers.push(replacementDocumentProducer); - }); - // We need to check if the documentProducers even has anything left to fetch from before enqueing them - const checkAndEnqueueDocumentProducer = async (documentProducerToCheck, checkNextDocumentProducerCallback) => { - try { - const { result: afterItem } = await documentProducerToCheck.current(diagnosticNode); - if (afterItem === undefined) { - // no more results left in this document producer, so we don't enqueue it - } - else { - // Safe to put document producer back in the queue - this.orderByPQ.enq(documentProducerToCheck); - } - await checkNextDocumentProducerCallback(); - } - catch (err) { - this.err = err; - return; - } - }; - const checkAndEnqueueDocumentProducers = async (rdp) => { - if (rdp.length > 0) { - // We still have 
a replacementDocumentProducer to check - const replacementDocumentProducer = rdp.shift(); - await checkAndEnqueueDocumentProducer(replacementDocumentProducer, async () => { - await checkAndEnqueueDocumentProducers(rdp); - }); - } - else { - // reexecutes the originFunction with the corrrected executionContext - return originFunction(); - } - }; - // Invoke the recursive function to get the ball rolling - await checkAndEnqueueDocumentProducers(replacementDocumentProducers); + const startTimeUTCInMs = getCurrentTimestampInMs(); + try { + if (requestContext.pipeline) { + response = await requestContext.pipeline.sendRequest(httpsClient, pipelineRequest); } - catch (err) { - this.err = err; - throw err; + else { + response = await httpsClient.sendRequest(pipelineRequest); } } - static _needPartitionKeyRangeCacheRefresh(error) { - // TODO: any error - return (error.code === StatusCodes.Gone && - "substatus" in error && - error["substatus"] === SubStatusCodes.PartitionKeyRangeGone); - } - /** - * Checks to see if the executionContext needs to be repaired. - * if so it repairs the execution context and executes the ifCallback, - * else it continues with the current execution context and executes the elseCallback - */ - async _repairExecutionContextIfNeeded(diagnosticNode, ifCallback, elseCallback) { - const documentProducer = this.orderByPQ.peek(); - // Check if split happened - try { - await documentProducer.current(diagnosticNode); - elseCallback(); - } - catch (err) { - if (ParallelQueryExecutionContextBase._needPartitionKeyRangeCacheRefresh(err)) { - // Split has happened so we need to repair execution context before continueing - return addDignosticChild((childNode) => this._repairExecutionContext(childNode, ifCallback), diagnosticNode, exports.DiagnosticNodeType.QUERY_REPAIR_NODE); - } - else { - // Something actually bad happened ... - this.err = err; - throw err; + catch (error) { + if (error.name === "AbortError") { + // If the user passed signal caused the abort, cancel the timeout and rethrow the error + if (userSignal && userSignal.aborted === true) { + clearTimeout(timeout); + throw error; } + // If the user didn't cancel, it must be an abort we called due to timeout + throw new TimeoutError(`Timeout Error! Request took more than ${requestContext.connectionPolicy.requestTimeout} ms`); } + throw error; } - /** - * Fetches the next element in the ParallelQueryExecutionContextBase. 
- */ - async nextItem(diagnosticNode) { - if (this.err) { - // if there is a prior error return error - throw this.err; - } - return new Promise((resolve, reject) => { - this.sem.take(() => { - if (!this.diagnosticNodeWrapper.consumed) { - diagnosticNode.addChildNode(this.diagnosticNodeWrapper.diagnosticNode, exports.CosmosDbDiagnosticLevel.debug, exports.MetadataLookUpType.QueryPlanLookUp); - this.diagnosticNodeWrapper.diagnosticNode = undefined; - this.diagnosticNodeWrapper.consumed = true; - } - else { - this.diagnosticNodeWrapper.diagnosticNode = diagnosticNode; - } - // NOTE: lock must be released before invoking quitting - if (this.err) { - // release the lock before invoking callback - this.sem.leave(); - // if there is a prior error return error - this.err.headers = this._getAndResetActiveResponseHeaders(); - reject(this.err); - return; - } - if (this.orderByPQ.size() === 0) { - // there is no more results - this.state = ParallelQueryExecutionContextBase.STATES.ended; - // release the lock before invoking callback - this.sem.leave(); - return resolve({ - result: undefined, - headers: this._getAndResetActiveResponseHeaders(), - }); - } - const ifCallback = () => { - // Release the semaphore to avoid deadlock - this.sem.leave(); - // Reexcute the function - return resolve(this.nextItem(diagnosticNode)); - }; - const elseCallback = async () => { - let documentProducer; - try { - documentProducer = this.orderByPQ.deq(); - } - catch (e) { - // if comparing elements of the priority queue throws exception - // set that error and return error - this.err = e; - // release the lock before invoking callback - this.sem.leave(); - this.err.headers = this._getAndResetActiveResponseHeaders(); - reject(this.err); - return; - } - let item; - let headers; - try { - const response = await documentProducer.nextItem(diagnosticNode); - item = response.result; - headers = response.headers; - this._mergeWithActiveResponseHeaders(headers); - if (item === undefined) { - // this should never happen - // because the documentProducer already has buffered an item - // assert item !== undefined - this.err = new Error(`Extracted DocumentProducer from the priority queue \ - doesn't have any buffered item!`); - // release the lock before invoking callback - this.sem.leave(); - return resolve({ - result: undefined, - headers: this._getAndResetActiveResponseHeaders(), - }); - } - } - catch (err) { - this.err = new Error(`Extracted DocumentProducer from the priority queue fails to get the \ - buffered item. Due to ${JSON.stringify(err)}`); - this.err.headers = this._getAndResetActiveResponseHeaders(); - // release the lock before invoking callback - this.sem.leave(); - reject(this.err); - return; - } - // we need to put back the document producer to the queue if it has more elements. 
- // the lock will be released after we know document producer must be put back in the queue or not - try { - const { result: afterItem, headers: otherHeaders } = await documentProducer.current(diagnosticNode); - this._mergeWithActiveResponseHeaders(otherHeaders); - if (afterItem === undefined) { - // no more results is left in this document producer - } - else { - try { - const headItem = documentProducer.fetchResults[0]; - if (typeof headItem === "undefined") { - throw new Error("Extracted DocumentProducer from PQ is invalid state with no result!"); - } - this.orderByPQ.enq(documentProducer); - } - catch (e) { - // if comparing elements in priority queue throws exception - // set error - this.err = e; - } - } - } - catch (err) { - if (ParallelQueryExecutionContextBase._needPartitionKeyRangeCacheRefresh(err)) { - // We want the document producer enqueued - // So that later parts of the code can repair the execution context - this.orderByPQ.enq(documentProducer); - } - else { - // Something actually bad happened - this.err = err; - reject(this.err); - } - } - finally { - // release the lock before returning - this.sem.leave(); - } - // invoke the callback on the item - return resolve({ - result: item, - headers: this._getAndResetActiveResponseHeaders(), - }); - }; - this._repairExecutionContextIfNeeded(diagnosticNode, ifCallback, elseCallback).catch(reject); - }); - }); - } - /** - * Determine if there are still remaining resources to processs based on the value of the continuation - * token or the elements remaining on the current batch in the QueryIterator. - * @returns true if there is other elements to process in the ParallelQueryExecutionContextBase. - */ - hasMoreResults() { - return !(this.state === ParallelQueryExecutionContextBase.STATES.ended || this.err !== undefined); - } - /** - * Creates document producers - */ - _createTargetPartitionQueryExecutionContext(partitionKeyTargetRange, continuationToken) { - // TODO: any - // creates target partition range Query Execution Context - let rewrittenQuery = this.partitionedQueryExecutionInfo.queryInfo.rewrittenQuery; - let sqlQuerySpec; - const query = this.query; - if (typeof query === "string") { - sqlQuerySpec = { query }; + clearTimeout(timeout); + const result = response.status === 204 || response.status === 304 || response.bodyAsText === "" + ? null + : JSON.parse(response.bodyAsText); + const responseHeaders = response.headers.toJSON(); + const substatus = responseHeaders[Constants$1.HttpHeaders.SubStatus] + ? 
parseInt(responseHeaders[Constants$1.HttpHeaders.SubStatus], 10) + : undefined; + diagnosticNode.recordSuccessfulNetworkCall(startTimeUTCInMs, requestContext, response, substatus, url); + if (response.status >= 400) { + const errorResponse = new ErrorResponse(result.message); + logger$1.warning(response.status + + " " + + requestContext.endpoint + + " " + + requestContext.path + + " " + + result.message); + errorResponse.code = response.status; + errorResponse.body = result; + errorResponse.headers = responseHeaders; + if (Constants$1.HttpHeaders.ActivityId in responseHeaders) { + errorResponse.activityId = responseHeaders[Constants$1.HttpHeaders.ActivityId]; } - else { - sqlQuerySpec = query; + if (Constants$1.HttpHeaders.SubStatus in responseHeaders) { + errorResponse.substatus = substatus; } - const formatPlaceHolder = "{documentdb-formattableorderbyquery-filter}"; - if (rewrittenQuery) { - sqlQuerySpec = JSON.parse(JSON.stringify(sqlQuerySpec)); - // We hardcode the formattable filter to true for now - rewrittenQuery = rewrittenQuery.replace(formatPlaceHolder, "true"); - sqlQuerySpec["query"] = rewrittenQuery; + if (Constants$1.HttpHeaders.RetryAfterInMs in responseHeaders) { + errorResponse.retryAfterInMs = parseInt(responseHeaders[Constants$1.HttpHeaders.RetryAfterInMs], 10); + Object.defineProperty(errorResponse, "retryAfterInMilliseconds", { + get: () => { + return errorResponse.retryAfterInMs; + }, + }); } - const options = Object.assign({}, this.options); - options.continuationToken = continuationToken; - return new DocumentProducer(this.clientContext, this.collectionLink, sqlQuerySpec, partitionKeyTargetRange, options); + throw errorResponse; } + return { + headers: responseHeaders, + result, + code: response.status, + substatus, + }; } -ParallelQueryExecutionContextBase.STATES = ParallelQueryExecutionContextBaseStates; - -// Copyright (c) Microsoft Corporation. /** - * Provides the ParallelQueryExecutionContext. - * This class is capable of handling parallelized queries and derives from ParallelQueryExecutionContextBase. * @hidden */ -class ParallelQueryExecutionContext extends ParallelQueryExecutionContextBase { - // Instance members are inherited - // Overriding documentProducerComparator for ParallelQueryExecutionContexts - /** - * Provides a Comparator for document producers using the min value of the corresponding target partition. - * @returns Comparator Function - * @hidden - */ - documentProducerComparator(docProd1, docProd2) { - return docProd1.generation - docProd2.generation; - } -} - -/** @hidden */ -class OrderByQueryExecutionContext extends ParallelQueryExecutionContextBase { - /** - * Provides the OrderByQueryExecutionContext. - * This class is capable of handling orderby queries and dervives from ParallelQueryExecutionContextBase. - * - * When handling a parallelized query, it instantiates one instance of - * DocumentProcuder per target partition key range and aggregates the result of each. - * - * @param clientContext - The service endpoint to use to create the client. - * @param collectionLink - The Collection Link - * @param options - Represents the feed options. 
- * @param partitionedQueryExecutionInfo - PartitionedQueryExecutionInfo - * @hidden - */ - constructor(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo) { - // Calling on base class constructor - super(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo); - this.orderByComparator = new OrderByDocumentProducerComparator(this.sortOrders); - } - // Instance members are inherited - // Overriding documentProducerComparator for OrderByQueryExecutionContexts - /** - * Provides a Comparator for document producers which respects orderby sort order. - * @returns Comparator Function - * @hidden - */ - documentProducerComparator(docProd1, docProd2) { - return this.orderByComparator.compare(docProd1, docProd2); - } -} - -/** @hidden */ -class OffsetLimitEndpointComponent { - constructor(executionContext, offset, limit) { - this.executionContext = executionContext; - this.offset = offset; - this.limit = limit; - } - async nextItem(diagnosticNode) { - const aggregateHeaders = getInitialHeader(); - while (this.offset > 0) { - // Grab next item but ignore the result. We only need the headers - const { headers } = await this.executionContext.nextItem(diagnosticNode); - this.offset--; - mergeHeaders(aggregateHeaders, headers); - } - if (this.limit > 0) { - const { result, headers } = await this.executionContext.nextItem(diagnosticNode); - this.limit--; - mergeHeaders(aggregateHeaders, headers); - return { result, headers: aggregateHeaders }; +async function request(requestContext, diagnosticNode) { + if (requestContext.body) { + requestContext.body = bodyFromData(requestContext.body); + if (!requestContext.body) { + throw new Error("parameter data must be a javascript object, string, or Buffer"); } - // If both limit and offset are 0, return nothing - return { - result: undefined, - headers: getInitialHeader(), - }; - } - hasMoreResults() { - return (this.offset > 0 || this.limit > 0) && this.executionContext.hasMoreResults(); - } -} - -/** @hidden */ -class OrderByEndpointComponent { - /** - * Represents an endpoint in handling an order by query. For each processed orderby - * result it returns 'payload' item of the result - * - * @param executionContext - Underlying Execution Context - * @hidden - */ - constructor(executionContext) { - this.executionContext = executionContext; - } - /** - * Execute a provided function on the next element in the OrderByEndpointComponent. - */ - async nextItem(diagnosticNode) { - const { result: item, headers } = await this.executionContext.nextItem(diagnosticNode); - return { - result: item !== undefined ? item.payload : undefined, - headers, - }; - } - /** - * Determine if there are still remaining resources to processs. - * @returns true if there is other elements to process in the OrderByEndpointComponent. - */ - hasMoreResults() { - return this.executionContext.hasMoreResults(); } + return addDignosticChild(async (childNode) => { + return execute({ + diagnosticNode: childNode, + requestContext, + executeRequest, + }); + }, diagnosticNode, exports.DiagnosticNodeType.REQUEST_ATTEMPTS); } +const RequestHandler = { + request, +}; // Copyright (c) Microsoft Corporation. -async function digest(str) { - const hash = crypto.createHash("sha256"); - hash.update(str, "utf8"); - return hash.digest("hex"); +// Licensed under the MIT license. +function atob(str) { + return Buffer.from(str, "base64").toString("binary"); } // Copyright (c) Microsoft Corporation. 
-async function hashObject(object) { - const stringifiedObject = stableStringify__default["default"](object); - return digest(stringifiedObject); -} - -/** @hidden */ -class OrderedDistinctEndpointComponent { - constructor(executionContext) { - this.executionContext = executionContext; - } - async nextItem(diagnosticNode) { - const { headers, result } = await this.executionContext.nextItem(diagnosticNode); - if (result) { - const hashedResult = await hashObject(result); - if (hashedResult === this.hashedLastResult) { - return { result: undefined, headers }; +// Licensed under the MIT license. +/** + * Models vector clock bases session token. Session token has the following format: + * `{Version}#{GlobalLSN}#{RegionId1}={LocalLsn1}#{RegionId2}={LocalLsn2}....#{RegionIdN}={LocalLsnN}` + * 'Version' captures the configuration number of the partition which returned this session token. + * 'Version' is incremented everytime topology of the partition is updated (say due to Add/Remove/Failover). + * + * The choice of separators '#' and '=' is important. Separators ';' and ',' are used to delimit + * per-partitionKeyRange session token + * @hidden + * + */ +class VectorSessionToken { + constructor(version, globalLsn, localLsnByregion, sessionToken) { + this.version = version; + this.globalLsn = globalLsn; + this.localLsnByregion = localLsnByregion; + this.sessionToken = sessionToken; + if (!this.sessionToken) { + const regionAndLocalLsn = []; + for (const [key, value] of this.localLsnByregion.entries()) { + regionAndLocalLsn.push(`${key}${VectorSessionToken.REGION_PROGRESS_SEPARATOR}${value}`); + } + const regionProgress = regionAndLocalLsn.join(VectorSessionToken.SEGMENT_SEPARATOR); + if (regionProgress === "") { + this.sessionToken = `${this.version}${VectorSessionToken.SEGMENT_SEPARATOR}${this.globalLsn}`; + } + else { + this.sessionToken = `${this.version}${VectorSessionToken.SEGMENT_SEPARATOR}${this.globalLsn}${VectorSessionToken.SEGMENT_SEPARATOR}${regionProgress}`; } - this.hashedLastResult = hashedResult; } - return { result, headers }; - } - hasMoreResults() { - return this.executionContext.hasMoreResults(); - } -} - -/** @hidden */ -class UnorderedDistinctEndpointComponent { - constructor(executionContext) { - this.executionContext = executionContext; - this.hashedResults = new Set(); } - async nextItem(diagnosticNode) { - const { headers, result } = await this.executionContext.nextItem(diagnosticNode); - if (result) { - const hashedResult = await hashObject(result); - if (this.hashedResults.has(hashedResult)) { - return { result: undefined, headers }; + static create(sessionToken) { + const [versionStr, globalLsnStr, ...regionSegments] = sessionToken.split(VectorSessionToken.SEGMENT_SEPARATOR); + const version = parseInt(versionStr, 10); + const globalLsn = parseFloat(globalLsnStr); + if (typeof version !== "number" || typeof globalLsn !== "number") { + return null; + } + const lsnByRegion = new Map(); + for (const regionSegment of regionSegments) { + const [regionIdStr, localLsnStr] = regionSegment.split(VectorSessionToken.REGION_PROGRESS_SEPARATOR); + if (!regionIdStr || !localLsnStr) { + return null; } - this.hashedResults.add(hashedResult); + const regionId = parseInt(regionIdStr, 10); + let localLsn; + try { + localLsn = localLsnStr; + } + catch (err) { + // TODO: log error + return null; + } + if (typeof regionId !== "number") { + return null; + } + lsnByRegion.set(regionId, localLsn); } - return { result, headers }; - } - hasMoreResults() { - return 
this.executionContext.hasMoreResults(); + return new VectorSessionToken(version, globalLsn, lsnByRegion, sessionToken); } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// All aggregates are effectively a group by operation -// The empty group is used for aggregates without a GROUP BY clause -const emptyGroup = "__empty__"; -// Newer API versions rewrite the query to return `item2`. It fixes some legacy issues with the original `item` result -// Aggregator code should use item2 when available -const extractAggregateResult = (payload) => Object.keys(payload).length > 0 ? (payload.item2 ? payload.item2 : payload.item) : null; - -/** @hidden */ -class GroupByEndpointComponent { - constructor(executionContext, queryInfo) { - this.executionContext = executionContext; - this.queryInfo = queryInfo; - this.groupings = new Map(); - this.aggregateResultArray = []; - this.completed = false; + equals(other) { + return !other + ? false + : this.version === other.version && + this.globalLsn === other.globalLsn && + this.areRegionProgressEqual(other.localLsnByregion); } - async nextItem(diagnosticNode) { - // If we have a full result set, begin returning results - if (this.aggregateResultArray.length > 0) { - return { - result: this.aggregateResultArray.pop(), - headers: getInitialHeader(), - }; + merge(other) { + if (other == null) { + throw new Error("other (Vector Session Token) must not be null"); } - if (this.completed) { - return { - result: undefined, - headers: getInitialHeader(), - }; + if (this.version === other.version && + this.localLsnByregion.size !== other.localLsnByregion.size) { + throw new Error(`Compared session tokens ${this.sessionToken} and ${other.sessionToken} have unexpected regions`); } - const aggregateHeaders = getInitialHeader(); - while (this.executionContext.hasMoreResults()) { - // Grab the next result - const { result, headers } = (await this.executionContext.nextItem(diagnosticNode)); - mergeHeaders(aggregateHeaders, headers); - // If it exists, process it via aggregators - if (result) { - const group = result.groupByItems ? await hashObject(result.groupByItems) : emptyGroup; - const aggregators = this.groupings.get(group); - const payload = result.payload; - if (aggregators) { - // Iterator over all results in the payload - Object.keys(payload).map((key) => { - // in case the value of a group is null make sure we create a dummy payload with item2==null - const effectiveGroupByValue = payload[key] - ? payload[key] - : new Map().set("item2", null); - const aggregateResult = extractAggregateResult(effectiveGroupByValue); - aggregators.get(key).aggregate(aggregateResult); - }); - } - else { - // This is the first time we have seen a grouping. Setup the initial result without aggregate values - const grouping = new Map(); - this.groupings.set(group, grouping); - // Iterator over all results in the payload - Object.keys(payload).map((key) => { - const aggregateType = this.queryInfo.groupByAliasToAggregateType[key]; - // Create a new aggregator for this specific aggregate field - const aggregator = createAggregator(aggregateType); - grouping.set(key, aggregator); - if (aggregateType) { - const aggregateResult = extractAggregateResult(payload[key]); - aggregator.aggregate(aggregateResult); - } - else { - aggregator.aggregate(payload[key]); - } - }); - } + const [higherVersionSessionToken, lowerVersionSessionToken] = this.version < other.version ? 
[other, this] : [this, other]; + const highestLocalLsnByRegion = new Map(); + for (const [regionId, highLocalLsn] of higherVersionSessionToken.localLsnByregion.entries()) { + const lowLocalLsn = lowerVersionSessionToken.localLsnByregion.get(regionId); + if (lowLocalLsn) { + highestLocalLsnByRegion.set(regionId, max(highLocalLsn, lowLocalLsn)); } - } - for (const grouping of this.groupings.values()) { - const groupResult = {}; - for (const [aggregateKey, aggregator] of grouping.entries()) { - groupResult[aggregateKey] = aggregator.getResult(); + else if (this.version === other.version) { + throw new Error(`Compared session tokens have unexpected regions. Session 1: ${this.sessionToken} - Session 2: ${this.sessionToken}`); + } + else { + highestLocalLsnByRegion.set(regionId, highLocalLsn); } - this.aggregateResultArray.push(groupResult); } - this.completed = true; - return { - result: this.aggregateResultArray.pop(), - headers: aggregateHeaders, - }; - } - hasMoreResults() { - return this.executionContext.hasMoreResults() || this.aggregateResultArray.length > 0; + return new VectorSessionToken(Math.max(this.version, other.version), Math.max(this.globalLsn, other.globalLsn), highestLocalLsnByRegion); } -} - -/** @hidden */ -class GroupByValueEndpointComponent { - constructor(executionContext, queryInfo) { - this.executionContext = executionContext; - this.queryInfo = queryInfo; - this.aggregators = new Map(); - this.aggregateResultArray = []; - this.completed = false; - // VALUE queries will only every have a single grouping - this.aggregateType = this.queryInfo.aggregates[0]; + toString() { + return this.sessionToken; } - async nextItem(diagnosticNode) { - // Start returning results if we have processed a full results set - if (this.aggregateResultArray.length > 0) { - return { - result: this.aggregateResultArray.pop(), - headers: getInitialHeader(), - }; - } - if (this.completed) { - return { - result: undefined, - headers: getInitialHeader(), - }; + areRegionProgressEqual(other) { + if (this.localLsnByregion.size !== other.size) { + return false; } - const aggregateHeaders = getInitialHeader(); - while (this.executionContext.hasMoreResults()) { - // Grab the next result - const { result, headers } = (await this.executionContext.nextItem(diagnosticNode)); - mergeHeaders(aggregateHeaders, headers); - // If it exists, process it via aggregators - if (result) { - let grouping = emptyGroup; - let payload = result; - if (result.groupByItems) { - // If the query contains a GROUP BY clause, it will have a payload property and groupByItems - payload = result.payload; - grouping = await hashObject(result.groupByItems); - } - const aggregator = this.aggregators.get(grouping); - if (!aggregator) { - // This is the first time we have seen a grouping so create a new aggregator - this.aggregators.set(grouping, createAggregator(this.aggregateType)); - } - if (this.aggregateType) { - const aggregateResult = extractAggregateResult(payload[0]); - // if aggregate result is null, we need to short circuit aggregation and return undefined - if (aggregateResult === null) { - this.completed = true; - } - this.aggregators.get(grouping).aggregate(aggregateResult); - } - else { - // Queries with no aggregates pass the payload directly to the aggregator - // Example: SELECT VALUE c.team FROM c GROUP BY c.team - this.aggregators.get(grouping).aggregate(payload); - } + for (const [regionId, localLsn] of this.localLsnByregion.entries()) { + const otherLocalLsn = other.get(regionId); + if (localLsn !== otherLocalLsn) { + 
return false; } } - // We bail early since we got an undefined result back `[{}]` - if (this.completed) { - return { - result: undefined, - headers: aggregateHeaders, - }; - } - // If no results are left in the underlying execution context, convert our aggregate results to an array - for (const aggregator of this.aggregators.values()) { - this.aggregateResultArray.push(aggregator.getResult()); - } - this.completed = true; - return { - result: this.aggregateResultArray.pop(), - headers: aggregateHeaders, - }; + return true; } - hasMoreResults() { - return this.executionContext.hasMoreResults() || this.aggregateResultArray.length > 0; +} +VectorSessionToken.SEGMENT_SEPARATOR = "#"; +VectorSessionToken.REGION_PROGRESS_SEPARATOR = "="; +/** + * @hidden + */ +function max(int1, int2) { + // NOTE: This only works for positive numbers + if (int1.length === int2.length) { + return int1 > int2 ? int1 : int2; + } + else if (int1.length > int2.length) { + return int1; + } + else { + return int2; } } +// Copyright (c) Microsoft Corporation. /** @hidden */ -class PipelinedQueryExecutionContext { - constructor(clientContext, collectionLink, query, options, partitionedQueryExecutionInfo) { - this.clientContext = clientContext; - this.collectionLink = collectionLink; - this.query = query; - this.options = options; - this.partitionedQueryExecutionInfo = partitionedQueryExecutionInfo; - this.endpoint = null; - this.pageSize = options["maxItemCount"]; - if (this.pageSize === undefined) { - this.pageSize = PipelinedQueryExecutionContext.DEFAULT_PAGE_SIZE; - } - // Pick between parallel vs order by execution context - const sortOrders = partitionedQueryExecutionInfo.queryInfo.orderBy; - if (Array.isArray(sortOrders) && sortOrders.length > 0) { - // Need to wrap orderby execution context in endpoint component, since the data is nested as a \ - // "payload" property. 
- this.endpoint = new OrderByEndpointComponent(new OrderByQueryExecutionContext(this.clientContext, this.collectionLink, this.query, this.options, this.partitionedQueryExecutionInfo)); +class SessionContainer { + constructor(collectionNameToCollectionResourceId = new Map(), collectionResourceIdToSessionTokens = new Map()) { + this.collectionNameToCollectionResourceId = collectionNameToCollectionResourceId; + this.collectionResourceIdToSessionTokens = collectionResourceIdToSessionTokens; + } + get(request) { + if (!request) { + throw new Error("request cannot be null"); } - else { - this.endpoint = new ParallelQueryExecutionContext(this.clientContext, this.collectionLink, this.query, this.options, this.partitionedQueryExecutionInfo); + const collectionName = getContainerLink(trimSlashes(request.resourceAddress)); + const rangeIdToTokenMap = this.getPartitionKeyRangeIdToTokenMap(collectionName); + return SessionContainer.getCombinedSessionTokenString(rangeIdToTokenMap); + } + remove(request) { + let collectionResourceId; + const resourceAddress = trimSlashes(request.resourceAddress); + const collectionName = getContainerLink(resourceAddress); + if (collectionName) { + collectionResourceId = this.collectionNameToCollectionResourceId.get(collectionName); + this.collectionNameToCollectionResourceId.delete(collectionName); } - if (Object.keys(partitionedQueryExecutionInfo.queryInfo.groupByAliasToAggregateType).length > 0 || - partitionedQueryExecutionInfo.queryInfo.aggregates.length > 0 || - partitionedQueryExecutionInfo.queryInfo.groupByExpressions.length > 0) { - if (partitionedQueryExecutionInfo.queryInfo.hasSelectValue) { - this.endpoint = new GroupByValueEndpointComponent(this.endpoint, partitionedQueryExecutionInfo.queryInfo); - } - else { - this.endpoint = new GroupByEndpointComponent(this.endpoint, partitionedQueryExecutionInfo.queryInfo); - } + if (collectionResourceId !== undefined) { + this.collectionResourceIdToSessionTokens.delete(collectionResourceId); } - // If top then add that to the pipeline. TOP N is effectively OFFSET 0 LIMIT N - const top = partitionedQueryExecutionInfo.queryInfo.top; - if (typeof top === "number") { - this.endpoint = new OffsetLimitEndpointComponent(this.endpoint, 0, top); + } + set(request, resHeaders) { + // TODO: we check the master logic a few different places. Might not need it. + if (!resHeaders || + SessionContainer.isReadingFromMaster(request.resourceType, request.operationType)) { + return; } - // If offset+limit then add that to the pipeline - const limit = partitionedQueryExecutionInfo.queryInfo.limit; - const offset = partitionedQueryExecutionInfo.queryInfo.offset; - if (typeof limit === "number" && typeof offset === "number") { - this.endpoint = new OffsetLimitEndpointComponent(this.endpoint, offset, limit); + const sessionTokenString = resHeaders[Constants$1.HttpHeaders.SessionToken]; + if (!sessionTokenString) { + return; } - // If distinct then add that to the pipeline - const distinctType = partitionedQueryExecutionInfo.queryInfo.distinctType; - if (distinctType === "Ordered") { - this.endpoint = new OrderedDistinctEndpointComponent(this.endpoint); + const containerName = this.getContainerName(request, resHeaders); + const ownerId = !request.isNameBased + ? 
request.resourceId + : resHeaders[Constants$1.HttpHeaders.OwnerId] || request.resourceId; + if (!ownerId) { + return; } - if (distinctType === "Unordered") { - this.endpoint = new UnorderedDistinctEndpointComponent(this.endpoint); + if (containerName && this.validateOwnerID(ownerId)) { + if (!this.collectionResourceIdToSessionTokens.has(ownerId)) { + this.collectionResourceIdToSessionTokens.set(ownerId, new Map()); + } + if (!this.collectionNameToCollectionResourceId.has(containerName)) { + this.collectionNameToCollectionResourceId.set(containerName, ownerId); + } + const containerSessionContainer = this.collectionResourceIdToSessionTokens.get(ownerId); + SessionContainer.compareAndSetToken(sessionTokenString, containerSessionContainer); } } - async nextItem(diagnosticNode) { - return this.endpoint.nextItem(diagnosticNode); + validateOwnerID(ownerId) { + // If ownerId contains exactly 8 bytes it represents a unique database+collection identifier. Otherwise it represents another resource + // The first 4 bytes are the database. The last 4 bytes are the collection. + // Cosmos rids potentially contain "-" which is an invalid character in the browser atob implementation + // See https://en.wikipedia.org/wiki/Base64#Filenames + return atob(ownerId.replace(/-/g, "/")).length === 8; } - // Removed callback here beacuse it wouldn't have ever worked... - hasMoreResults() { - return this.endpoint.hasMoreResults(); + getPartitionKeyRangeIdToTokenMap(collectionName) { + let rangeIdToTokenMap = null; + if (collectionName && this.collectionNameToCollectionResourceId.has(collectionName)) { + rangeIdToTokenMap = this.collectionResourceIdToSessionTokens.get(this.collectionNameToCollectionResourceId.get(collectionName)); + } + return rangeIdToTokenMap; } - async fetchMore(diagnosticNode) { - // if the wrapped endpoint has different implementation for fetchMore use that - // otherwise use the default implementation - if (typeof this.endpoint.fetchMore === "function") { - return this.endpoint.fetchMore(diagnosticNode); + static getCombinedSessionTokenString(tokens) { + if (!tokens || tokens.size === 0) { + return SessionContainer.EMPTY_SESSION_TOKEN; } - else { - this.fetchBuffer = []; - this.fetchMoreRespHeaders = getInitialHeader(); - return this._fetchMoreImplementation(diagnosticNode); + let result = ""; + for (const [range, token] of tokens.entries()) { + result += + range + + SessionContainer.SESSION_TOKEN_PARTITION_SPLITTER + + token.toString() + + SessionContainer.SESSION_TOKEN_SEPARATOR; } + return result.slice(0, -1); } - async _fetchMoreImplementation(diagnosticNode) { - try { - const { result: item, headers } = await this.endpoint.nextItem(diagnosticNode); - mergeHeaders(this.fetchMoreRespHeaders, headers); - if (item === undefined) { - // no more results - if (this.fetchBuffer.length === 0) { - return { - result: undefined, - headers: this.fetchMoreRespHeaders, - }; - } - else { - // Just give what we have - const temp = this.fetchBuffer; - this.fetchBuffer = []; - return { result: temp, headers: this.fetchMoreRespHeaders }; - } - } - else { - // append the result - this.fetchBuffer.push(item); - if (this.fetchBuffer.length >= this.pageSize) { - // fetched enough results - const temp = this.fetchBuffer.slice(0, this.pageSize); - this.fetchBuffer = this.fetchBuffer.splice(this.pageSize); - return { result: temp, headers: this.fetchMoreRespHeaders }; - } - else { - // recursively fetch more - // TODO: is recursion a good idea? 
- return this._fetchMoreImplementation(diagnosticNode); - } - } + static compareAndSetToken(newTokenString, containerSessionTokens) { + if (!newTokenString) { + return; } - catch (err) { - mergeHeaders(this.fetchMoreRespHeaders, err.headers); - err.headers = this.fetchMoreRespHeaders; - if (err) { - throw err; + const partitionsParts = newTokenString.split(SessionContainer.SESSION_TOKEN_SEPARATOR); + for (const partitionPart of partitionsParts) { + const newTokenParts = partitionPart.split(SessionContainer.SESSION_TOKEN_PARTITION_SPLITTER); + if (newTokenParts.length !== 2) { + return; } + const range = newTokenParts[0]; + const newToken = VectorSessionToken.create(newTokenParts[1]); + const tokenForRange = !containerSessionTokens.get(range) + ? newToken + : containerSessionTokens.get(range).merge(newToken); + containerSessionTokens.set(range, tokenForRange); } } -} -PipelinedQueryExecutionContext.DEFAULT_PAGE_SIZE = 10; + // TODO: have a assert if the type doesn't mastch known types + static isReadingFromMaster(resourceType, operationType) { + if (resourceType === Constants$1.Path.OffersPathSegment || + resourceType === Constants$1.Path.DatabasesPathSegment || + resourceType === Constants$1.Path.UsersPathSegment || + resourceType === Constants$1.Path.PermissionsPathSegment || + resourceType === Constants$1.Path.TopologyPathSegment || + resourceType === Constants$1.Path.DatabaseAccountPathSegment || + resourceType === Constants$1.Path.PartitionKeyRangesPathSegment || + (resourceType === Constants$1.Path.CollectionsPathSegment && + operationType === exports.OperationType.Query)) { + return true; + } + return false; + } + getContainerName(request, headers) { + let ownerFullName = headers[Constants$1.HttpHeaders.OwnerFullName]; + if (!ownerFullName) { + ownerFullName = trimSlashes(request.resourceAddress); + } + return getContainerLink(ownerFullName); + } +} +SessionContainer.EMPTY_SESSION_TOKEN = ""; +SessionContainer.SESSION_TOKEN_SEPARATOR = ","; +SessionContainer.SESSION_TOKEN_PARTITION_SPLITTER = ":"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function checkURL(testString) { + return new URL(testString); +} +function sanitizeEndpoint(url) { + return new URL(url).href.replace(/\/$/, ""); +} // Copyright (c) Microsoft Corporation. /** - * Represents a QueryIterator Object, an implementation of feed or query response that enables - * traversal and iterating over the response - * in the Azure Cosmos DB database service. + * Implementation of DiagnosticWriter, which uses \@azure/logger to write + * diagnostics. + * @hidden */ -class QueryIterator { - /** - * @hidden - */ - constructor(clientContext, query, options, fetchFunctions, resourceLink, resourceType) { - this.clientContext = clientContext; - this.query = query; - this.options = options; - this.fetchFunctions = fetchFunctions; - this.resourceLink = resourceLink; - this.resourceType = resourceType; - this.query = query; - this.fetchFunctions = fetchFunctions; - this.options = options || {}; - this.resourceLink = resourceLink; - this.fetchAllLastResHeaders = getInitialHeader(); - this.reset(); - this.isInitialized = false; +class LogDiagnosticWriter { + constructor() { + this.logger = logger$5.createClientLogger("CosmosDBDiagnostics"); } - /** - * Gets an async iterator that will yield results until completion. - * - * NOTE: AsyncIterators are a very new feature and you might need to - * use polyfils/etc. in order to use them in your code. 
- * - * If you're using TypeScript, you can use the following polyfill as long - * as you target ES6 or higher and are running on Node 6 or higher. - * - * ```typescript - * if (!Symbol || !Symbol.asyncIterator) { - * (Symbol as any).asyncIterator = Symbol.for("Symbol.asyncIterator"); - * } - * ``` - * - * @example Iterate over all databases - * ```typescript - * for await(const { resources: db } of client.databases.readAll().getAsyncIterator()) { - * console.log(`Got ${db} from AsyncIterator`); - * } - * ``` - */ - getAsyncIterator() { - return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() { - this.reset(); - let diagnosticNode = new DiagnosticNodeInternal(this.clientContext.diagnosticLevel, exports.DiagnosticNodeType.CLIENT_REQUEST_NODE, null); - this.queryPlanPromise = this.fetchQueryPlan(diagnosticNode); - while (this.queryExecutionContext.hasMoreResults()) { - let response; - try { - response = yield tslib.__await(this.queryExecutionContext.fetchMore(diagnosticNode)); - } - catch (error) { - if (this.needsQueryPlan(error)) { - yield tslib.__await(this.createPipelinedExecutionContext()); - try { - response = yield tslib.__await(this.queryExecutionContext.fetchMore(diagnosticNode)); - } - catch (queryError) { - this.handleSplitError(queryError); - } - } - else { - throw error; - } - } - const feedResponse = new FeedResponse(response.result, response.headers, this.queryExecutionContext.hasMoreResults(), diagnosticNode.toDiagnostic(this.clientContext.getClientConfig())); - diagnosticNode = new DiagnosticNodeInternal(this.clientContext.diagnosticLevel, exports.DiagnosticNodeType.CLIENT_REQUEST_NODE, null); - if (response.result !== undefined) { - yield yield tslib.__await(feedResponse); - } - } - }); + async write(diagnosticsData) { + this.logger.verbose(diagnosticsData); } - /** - * Determine if there are still remaining resources to process based on the value of the continuation token or the - * elements remaining on the current batch in the QueryIterator. - * @returns true if there is other elements to process in the QueryIterator. - */ - hasMoreResults() { - return this.queryExecutionContext.hasMoreResults(); +} +/** + * Implementation of a no-op DiagnosticWriter. + * @hidden + */ +class NoOpDiagnosticWriter { + async write(_diagnosticsData) { + // No op } - /** - * Fetch all pages for the query and return a single FeedResponse. - */ - async fetchAll() { - return withDiagnostics(async (diagnosticNode) => { - return this.fetchAllInternal(diagnosticNode); - }, this.clientContext); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class DefaultDiagnosticFormatter { + format(cosmosDiagnostic) { + return JSON.stringify(cosmosDiagnostic); } - /** - * @hidden - */ - async fetchAllInternal(diagnosticNode) { - this.reset(); - let response; +} + +// Copyright (c) Microsoft Corporation. 
+const uuid = uuid$3.v4; +const logger = logger$5.createClientLogger("ClientContext"); +const QueryJsonContentType = "application/query+json"; +const HttpHeaders = Constants$1.HttpHeaders; +/** + * @hidden + * @hidden + */ +class ClientContext { + constructor(cosmosClientOptions, globalEndpointManager, clientConfig, diagnosticLevel) { + this.cosmosClientOptions = cosmosClientOptions; + this.globalEndpointManager = globalEndpointManager; + this.clientConfig = clientConfig; + this.diagnosticLevel = diagnosticLevel; + this.connectionPolicy = cosmosClientOptions.connectionPolicy; + this.sessionContainer = new SessionContainer(); + this.partitionKeyDefinitionCache = {}; + this.pipeline = null; + if (cosmosClientOptions.aadCredentials) { + this.pipeline = coreRestPipeline.createEmptyPipeline(); + const hrefEndpoint = sanitizeEndpoint(cosmosClientOptions.endpoint); + const scope = `${hrefEndpoint}/.default`; + this.pipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential: cosmosClientOptions.aadCredentials, + scopes: scope, + challengeCallbacks: { + async authorizeRequest({ request, getAccessToken }) { + const tokenResponse = await getAccessToken([scope], {}); + const AUTH_PREFIX = `type=aad&ver=1.0&sig=`; + const authorizationToken = `${AUTH_PREFIX}${tokenResponse.token}`; + request.headers.set("Authorization", authorizationToken); + }, + }, + })); + } + this.initializeDiagnosticSettings(diagnosticLevel); + } + /** @hidden */ + async read({ path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) { try { - response = await this.toArrayImplementation(diagnosticNode); + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.get, path, operationType: exports.OperationType.Read, resourceId, + options, + resourceType, + partitionKey }); + diagnosticNode.addData({ + operationType: exports.OperationType.Read, + resourceType, + }); + request.headers = await this.buildHeaders(request); + this.applySessionToken(request); + // read will use ReadEndpoint since it uses GET operation + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + this.captureSessionToken(undefined, path, exports.OperationType.Read, response.headers); + return response; } - catch (error) { - this.handleSplitError(error); + catch (err) { + this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); + throw err; } - return response; } - /** - * Retrieve the next batch from the feed. - * - * This may or may not fetch more pages from the backend depending on your settings - * and the type of query. Aggregate queries will generally fetch all backend pages - * before returning the first batch of responses. 
- */ - async fetchNext() { - return withDiagnostics(async (diagnosticNode) => { - this.queryPlanPromise = withMetadataDiagnostics(async (metadataNode) => { - return this.fetchQueryPlan(metadataNode); - }, diagnosticNode, exports.MetadataLookUpType.QueryPlanLookUp); - if (!this.isInitialized) { - await this.init(); - } - let response; - try { - response = await this.queryExecutionContext.fetchMore(diagnosticNode); - } - catch (error) { - if (this.needsQueryPlan(error)) { - await this.createPipelinedExecutionContext(); - try { - response = await this.queryExecutionContext.fetchMore(diagnosticNode); - } - catch (queryError) { - this.handleSplitError(queryError); - } - } - else { - throw error; - } + async queryFeed({ path, resourceType, resourceId, resultFn, query, options, diagnosticNode, partitionKeyRangeId, partitionKey, startEpk, endEpk, }) { + // Query operations will use ReadEndpoint even though it uses + // GET(for queryFeed) and POST(for regular query operations) + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.get, path, operationType: exports.OperationType.Query, partitionKeyRangeId, + resourceId, + resourceType, + options, body: query, partitionKey }); + diagnosticNode.addData({ + operationType: exports.OperationType.Query, + resourceType, + }); + const requestId = uuid(); + if (query !== undefined) { + request.method = exports.HTTPMethod.post; + } + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + request.headers = await this.buildHeaders(request); + if (startEpk !== undefined && endEpk !== undefined) { + request.headers[HttpHeaders.StartEpk] = startEpk; + request.headers[HttpHeaders.EndEpk] = endEpk; + request.headers[HttpHeaders.ReadFeedKeyType] = "EffectivePartitionKeyRange"; + } + if (query !== undefined) { + request.headers[HttpHeaders.IsQuery] = "true"; + request.headers[HttpHeaders.ContentType] = QueryJsonContentType; + if (typeof query === "string") { + request.body = { query }; // Converts query text to query object. } - return new FeedResponse(response.result, response.headers, this.queryExecutionContext.hasMoreResults(), getEmptyCosmosDiagnostics()); - }, this.clientContext); - } - /** - * Reset the QueryIterator to the beginning and clear all the resources inside it - */ - reset() { - this.queryPlanPromise = undefined; - this.fetchAllLastResHeaders = getInitialHeader(); - this.fetchAllTempResources = []; - this.queryExecutionContext = new DefaultQueryExecutionContext(this.options, this.fetchFunctions); + } + this.applySessionToken(request); + logger.info("query " + + requestId + + " started" + + (request.partitionKeyRangeId ? 
" pkrid: " + request.partitionKeyRangeId : "")); + logger.verbose(request); + const start = Date.now(); + const response = await RequestHandler.request(request, diagnosticNode); + logger.info("query " + requestId + " finished - " + (Date.now() - start) + "ms"); + this.captureSessionToken(undefined, path, exports.OperationType.Query, response.headers); + return this.processQueryFeedResponse(response, !!query, resultFn); } - async toArrayImplementation(diagnosticNode) { - this.queryPlanPromise = withMetadataDiagnostics(async (metadataNode) => { - return this.fetchQueryPlan(metadataNode); - }, diagnosticNode, exports.MetadataLookUpType.QueryPlanLookUp); - // this.queryPlanPromise = this.fetchQueryPlan(diagnosticNode); - if (!this.isInitialized) { - await this.init(); + async getQueryPlan(path, resourceType, resourceId, query, options = {}, diagnosticNode) { + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, path, operationType: exports.OperationType.Read, resourceId, + resourceType, + options, body: query }); + diagnosticNode.addData({ + operationType: exports.OperationType.Read, + resourceType, + }); + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + request.headers = await this.buildHeaders(request); + request.headers[HttpHeaders.IsQueryPlan] = "True"; + request.headers[HttpHeaders.QueryVersion] = "1.4"; + request.headers[HttpHeaders.SupportedQueryFeatures] = + "NonValueAggregate, Aggregate, Distinct, MultipleOrderBy, OffsetAndLimit, OrderBy, Top, CompositeAggregate, GroupBy, MultipleAggregates"; + request.headers[HttpHeaders.ContentType] = QueryJsonContentType; + if (typeof query === "string") { + request.body = { query }; // Converts query text to query object. 
} - while (this.queryExecutionContext.hasMoreResults()) { - let response; - try { - response = await this.queryExecutionContext.nextItem(diagnosticNode); - } - catch (error) { - if (this.needsQueryPlan(error)) { - await this.createPipelinedExecutionContext(); - response = await this.queryExecutionContext.nextItem(diagnosticNode); - } - else { - throw error; - } + this.applySessionToken(request); + const response = await RequestHandler.request(request, diagnosticNode); + this.captureSessionToken(undefined, path, exports.OperationType.Query, response.headers); + return response; + } + queryPartitionKeyRanges(collectionLink, query, options) { + const path = getPathFromLink(collectionLink, exports.ResourceType.pkranges); + const id = getIdFromLink(collectionLink); + const cb = async (diagNode, innerOptions) => { + const response = await this.queryFeed({ + path, + resourceType: exports.ResourceType.pkranges, + resourceId: id, + resultFn: (result) => result.PartitionKeyRanges, + query, + options: innerOptions, + diagnosticNode: diagNode, + }); + return response; + }; + return new QueryIterator(this, query, options, cb); + } + async delete({ path, resourceType, resourceId, options = {}, partitionKey, method = exports.HTTPMethod.delete, diagnosticNode, }) { + try { + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: method, operationType: exports.OperationType.Delete, path, + resourceType, + options, + resourceId, + partitionKey }); + diagnosticNode.addData({ + operationType: exports.OperationType.Delete, + resourceType, + }); + request.headers = await this.buildHeaders(request); + this.applySessionToken(request); + // deleteResource will use WriteEndpoint since it uses DELETE operation + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + if (parseLink(path).type !== "colls") { + this.captureSessionToken(undefined, path, exports.OperationType.Delete, response.headers); } - const { result, headers } = response; - // concatenate the results and fetch more - mergeHeaders(this.fetchAllLastResHeaders, headers); - if (result !== undefined) { - this.fetchAllTempResources.push(result); + else { + this.clearSessionToken(path); } + return response; + } + catch (err) { + this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); + throw err; } - return new FeedResponse(this.fetchAllTempResources, this.fetchAllLastResHeaders, this.queryExecutionContext.hasMoreResults(), getEmptyCosmosDiagnostics()); } - async createPipelinedExecutionContext() { - const queryPlanResponse = await this.queryPlanPromise; - // We always coerce queryPlanPromise to resolved. 
So if it errored, we need to manually inspect the resolved value - if (queryPlanResponse instanceof Error) { - throw queryPlanResponse; + async patch({ body, path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) { + try { + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.patch, operationType: exports.OperationType.Patch, path, + resourceType, + body, + resourceId, + options, + partitionKey }); + diagnosticNode.addData({ + operationType: exports.OperationType.Patch, + resourceType, + }); + request.headers = await this.buildHeaders(request); + this.applySessionToken(request); + // patch will use WriteEndpoint + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + this.captureSessionToken(undefined, path, exports.OperationType.Patch, response.headers); + return response; } - const queryPlan = queryPlanResponse.result; - const queryInfo = queryPlan.queryInfo; - if (queryInfo.aggregates.length > 0 && queryInfo.hasSelectValue === false) { - throw new Error("Aggregate queries must use the VALUE keyword"); + catch (err) { + this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); + throw err; } - this.queryExecutionContext = new PipelinedQueryExecutionContext(this.clientContext, this.resourceLink, this.query, this.options, queryPlan); } - async fetchQueryPlan(diagnosticNode) { - if (!this.queryPlanPromise && this.resourceType === exports.ResourceType.item) { - return this.clientContext - .getQueryPlan(getPathFromLink(this.resourceLink) + "/docs", exports.ResourceType.item, this.resourceLink, this.query, this.options, diagnosticNode) - .catch((error) => error); // Without this catch, node reports an unhandled rejection. So we stash the promise as resolved even if it errored. + async create({ body, path, resourceType, resourceId, diagnosticNode, options = {}, partitionKey, }) { + try { + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Create, path, + resourceType, + resourceId, + body, + options, + partitionKey }); + diagnosticNode.addData({ + operationType: exports.OperationType.Create, + resourceType, + }); + request.headers = await this.buildHeaders(request); + // create will use WriteEndpoint since it uses POST operation + this.applySessionToken(request); + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + this.captureSessionToken(undefined, path, exports.OperationType.Create, response.headers); + return response; + } + catch (err) { + this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); + throw err; } - return this.queryPlanPromise; } - needsQueryPlan(error) { - var _a; - if (((_a = error.body) === null || _a === void 0 ? 
void 0 : _a.additionalErrorInfo) || - error.message.includes("Cross partition query only supports")) { - return error.code === StatusCodes.BadRequest && this.resourceType === exports.ResourceType.item; + processQueryFeedResponse(res, isQuery, resultFn) { + if (isQuery) { + return { + result: resultFn(res.result), + headers: res.headers, + code: res.code, + }; } else { - throw error; + const newResult = resultFn(res.result).map((body) => body); + return { + result: newResult, + headers: res.headers, + code: res.code, + }; } } - async init() { - if (this.isInitialized === true) { + applySessionToken(requestContext) { + const request = this.getSessionParams(requestContext.path); + if (requestContext.headers && requestContext.headers[HttpHeaders.SessionToken]) { return; } - if (this.initPromise === undefined) { - this.initPromise = this._init(); + const sessionConsistency = requestContext.headers[HttpHeaders.ConsistencyLevel]; + if (!sessionConsistency) { + return; + } + if (sessionConsistency !== exports.ConsistencyLevel.Session) { + return; + } + if (request.resourceAddress) { + const sessionToken = this.sessionContainer.get(request); + if (sessionToken) { + requestContext.headers[HttpHeaders.SessionToken] = sessionToken; + } } - return this.initPromise; } - async _init() { - if (this.options.forceQueryPlan === true && this.resourceType === exports.ResourceType.item) { - await this.createPipelinedExecutionContext(); + async replace({ body, path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) { + try { + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.put, operationType: exports.OperationType.Replace, path, + resourceType, + body, + resourceId, + options, + partitionKey }); + diagnosticNode.addData({ + operationType: exports.OperationType.Replace, + resourceType, + }); + request.headers = await this.buildHeaders(request); + this.applySessionToken(request); + // replace will use WriteEndpoint since it uses PUT operation + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + this.captureSessionToken(undefined, path, exports.OperationType.Replace, response.headers); + return response; + } + catch (err) { + this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); + throw err; } - this.isInitialized = true; } - handleSplitError(err) { - if (err.code === 410) { - const error = new Error("Encountered partition split and could not recover. 
This request is retryable"); - error.code = 503; - error.originalError = err; - throw error; + async upsert({ body, path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) { + try { + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Upsert, path, + resourceType, + body, + resourceId, + options, + partitionKey }); + diagnosticNode.addData({ + operationType: exports.OperationType.Upsert, + resourceType, + }); + request.headers = await this.buildHeaders(request); + request.headers[HttpHeaders.IsUpsert] = true; + this.applySessionToken(request); + // upsert will use WriteEndpoint since it uses POST operation + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + this.captureSessionToken(undefined, path, exports.OperationType.Upsert, response.headers); + return response; } - else { + catch (err) { + this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); throw err; } } -} - -class ConflictResponse extends ResourceResponse { - constructor(resource, headers, statusCode, conflict, diagnostics) { - super(resource, headers, statusCode, diagnostics); - this.conflict = conflict; + async execute({ sprocLink, params, options = {}, partitionKey, diagnosticNode, }) { + // Accept a single parameter or an array of parameters. + // Didn't add type annotation for this because we should legacy this behavior + if (params !== null && params !== undefined && !Array.isArray(params)) { + params = [params]; + } + const path = getPathFromLink(sprocLink); + const id = getIdFromLink(sprocLink); + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Execute, path, resourceType: exports.ResourceType.sproc, options, resourceId: id, body: params, partitionKey }); + diagnosticNode.addData({ + operationType: exports.OperationType.Execute, + resourceType: exports.ResourceType.sproc, + }); + request.headers = await this.buildHeaders(request); + // executeStoredProcedure will use WriteEndpoint since it uses POST operation + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + return response; } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -async function readPartitionKeyDefinition(diagnosticNode, container) { - const partitionKeyDefinition = await container.readPartitionKeyDefinition(diagnosticNode); - return partitionKeyDefinition.resource; -} - -/** - * Use to read or delete a given {@link Conflict} by id. - * - * @see {@link Conflicts} to query or read all conflicts. - */ -class Conflict { /** - * Returns a reference URL to the resource. Used for linking in Permissions. + * Gets the Database account information. + * @param options - `urlConnection` in the options is the endpoint url whose database account needs to be retrieved. + * If not present, current client's url will be used. 
*/ - get url() { - return `/${this.container.url}/${Constants$1.Path.ConflictsPathSegment}/${this.id}`; + async getDatabaseAccount(diagnosticNode, options = {}) { + const endpoint = options.urlConnection || this.cosmosClientOptions.endpoint; + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { endpoint, method: exports.HTTPMethod.get, operationType: exports.OperationType.Read, path: "", resourceType: exports.ResourceType.none, options }); + diagnosticNode.addData({ + operationType: exports.OperationType.Read, + resourceType: exports.ResourceType.none, + }); + request.headers = await this.buildHeaders(request); + // await options.beforeOperation({ endpoint, request, headers: requestHeaders }); + const { result, headers, code, substatus, diagnostics } = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + const databaseAccount = new DatabaseAccount(result, headers); + return { + result: databaseAccount, + headers, + diagnostics, + code: code, + substatus: substatus, + }; } - /** - * @hidden - * @param container - The parent {@link Container}. - * @param id - The id of the given {@link Conflict}. - */ - constructor(container, id, clientContext, partitionKey) { - this.container = container; - this.id = id; - this.clientContext = clientContext; - this.partitionKey = partitionKey; - this.partitionKey = partitionKey; + getWriteEndpoint(diagnosticNode) { + return this.globalEndpointManager.getWriteEndpoint(diagnosticNode); } - /** - * Read the {@link ConflictDefinition} for the given {@link Conflict}. - */ - async read(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url, exports.ResourceType.conflicts); - const id = getIdFromLink(this.url); - const response = await this.clientContext.read({ - path, - resourceType: exports.ResourceType.user, - resourceId: id, + getReadEndpoint(diagnosticNode) { + return this.globalEndpointManager.getReadEndpoint(diagnosticNode); + } + getWriteEndpoints() { + return this.globalEndpointManager.getWriteEndpoints(); + } + getReadEndpoints() { + return this.globalEndpointManager.getReadEndpoints(); + } + async batch({ body, path, partitionKey, resourceId, options = {}, diagnosticNode, }) { + try { + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Batch, path, + body, resourceType: exports.ResourceType.item, resourceId, options, - diagnosticNode, + partitionKey }); + diagnosticNode.addData({ + operationType: exports.OperationType.Batch, + resourceType: exports.ResourceType.item, }); - return new ConflictResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + request.headers = await this.buildHeaders(request); + request.headers[HttpHeaders.IsBatchRequest] = true; + request.headers[HttpHeaders.IsBatchAtomic] = true; + this.applySessionToken(request); + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + this.captureSessionToken(undefined, path, exports.OperationType.Batch, response.headers); + response.diagnostics = diagnosticNode.toDiagnostic(this.getClientConfig()); + return response; + } + catch (err) { + this.captureSessionToken(err, 
path, exports.OperationType.Upsert, err.headers); + throw err; + } } - /** - * Delete the given {@link ConflictDefinition}. - */ - async delete(options) { - return withDiagnostics(async (diagnosticNode) => { - if (this.partitionKey === undefined) { - const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); - this.partitionKey = undefinedPartitionKey(partitionKeyDefinition); - } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.conflicts, - resourceId: id, - options, - partitionKey: this.partitionKey, - diagnosticNode, + async bulk({ body, path, partitionKeyRangeId, resourceId, bulkOptions = {}, options = {}, diagnosticNode, }) { + try { + const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Batch, path, + body, resourceType: exports.ResourceType.item, resourceId, + options }); + diagnosticNode.addData({ + operationType: exports.OperationType.Batch, + resourceType: exports.ResourceType.item, }); - return new ConflictResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + request.headers = await this.buildHeaders(request); + request.headers[HttpHeaders.IsBatchRequest] = true; + request.headers[HttpHeaders.PartitionKeyRangeID] = partitionKeyRangeId; + request.headers[HttpHeaders.IsBatchAtomic] = false; + request.headers[HttpHeaders.BatchContinueOnError] = bulkOptions.continueOnError || false; + this.applySessionToken(request); + request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); + const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); + this.captureSessionToken(undefined, path, exports.OperationType.Batch, response.headers); + return response; + } + catch (err) { + this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); + throw err; + } } -} - -/** - * Use to query or read all conflicts. - * - * @see {@link Conflict} to read or delete a given {@link Conflict} by id. 
- */ -class Conflicts { - constructor(container, clientContext) { - this.container = container; - this.clientContext = clientContext; + captureSessionToken(err, path, operationType, resHeaders) { + const request = this.getSessionParams(path); + request.operationType = operationType; + if (!err || + (!this.isMasterResource(request.resourceType) && + (err.code === StatusCodes.PreconditionFailed || + err.code === StatusCodes.Conflict || + (err.code === StatusCodes.NotFound && + err.substatus !== SubStatusCodes.ReadSessionNotAvailable)))) { + this.sessionContainer.set(request, resHeaders); + } } - query(query, options) { - const path = getPathFromLink(this.container.url, exports.ResourceType.conflicts); - const id = getIdFromLink(this.container.url); - return new QueryIterator(this.clientContext, query, options, (diagNode, innerOptions) => { - return this.clientContext.queryFeed({ - path, - resourceType: exports.ResourceType.conflicts, - resourceId: id, - resultFn: (result) => result.Conflicts, - query, - options: innerOptions, - diagnosticNode: diagNode, - }); + clearSessionToken(path) { + const request = this.getSessionParams(path); + this.sessionContainer.remove(request); + } + recordDiagnostics(diagnostic) { + const formatted = this.diagnosticFormatter.format(diagnostic); + this.diagnosticWriter.write(formatted); + } + initializeDiagnosticSettings(diagnosticLevel) { + this.diagnosticFormatter = new DefaultDiagnosticFormatter(); + switch (diagnosticLevel) { + case exports.CosmosDbDiagnosticLevel.info: + this.diagnosticWriter = new NoOpDiagnosticWriter(); + break; + default: + this.diagnosticWriter = new LogDiagnosticWriter(); + } + } + // TODO: move + getSessionParams(resourceLink) { + const resourceId = null; + let resourceAddress = null; + const parserOutput = parseLink(resourceLink); + resourceAddress = parserOutput.objectBody.self; + const resourceType = parserOutput.type; + return { + resourceId, + resourceAddress, + resourceType, + isNameBased: true, + }; + } + isMasterResource(resourceType) { + if (resourceType === Constants$1.Path.OffersPathSegment || + resourceType === Constants$1.Path.DatabasesPathSegment || + resourceType === Constants$1.Path.UsersPathSegment || + resourceType === Constants$1.Path.PermissionsPathSegment || + resourceType === Constants$1.Path.TopologyPathSegment || + resourceType === Constants$1.Path.DatabaseAccountPathSegment || + resourceType === Constants$1.Path.PartitionKeyRangesPathSegment || + resourceType === Constants$1.Path.CollectionsPathSegment) { + return true; + } + return false; + } + buildHeaders(requestContext) { + return getHeaders({ + clientOptions: this.cosmosClientOptions, + defaultHeaders: Object.assign(Object.assign({}, this.cosmosClientOptions.defaultHeaders), requestContext.options.initialHeaders), + verb: requestContext.method, + path: requestContext.path, + resourceId: requestContext.resourceId, + resourceType: requestContext.resourceType, + options: requestContext.options, + partitionKeyRangeId: requestContext.partitionKeyRangeId, + useMultipleWriteLocations: this.connectionPolicy.useMultipleWriteLocations, + partitionKey: requestContext.partitionKey !== undefined + ? convertToInternalPartitionKey(requestContext.partitionKey) + : undefined, // TODO: Move this check from here to PartitionKey }); } /** - * Reads all conflicts - * @param options - Use to set options like response page size, continuation tokens, etc. + * Returns collection of properties which are derived from the context for Request Creation. 
+ * These properties have client wide scope, as opposed to request specific scope. + * @returns */ - readAll(options) { - return this.query(undefined, options); + getContextDerivedPropsForRequestCreation() { + return { + globalEndpointManager: this.globalEndpointManager, + requestAgent: this.cosmosClientOptions.agent, + connectionPolicy: this.connectionPolicy, + client: this, + plugins: this.cosmosClientOptions.plugins, + pipeline: this.pipeline, + }; + } + getClientConfig() { + return this.clientConfig; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * @hidden + */ +function getUserAgent(suffix) { + const ua = `${universalUserAgent.getUserAgent()} ${Constants$1.SDKName}/${Constants$1.SDKVersion}`; + if (suffix) { + return ua + " " + suffix; } + return ua; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -exports.ConflictResolutionMode = void 0; -(function (ConflictResolutionMode) { - ConflictResolutionMode["Custom"] = "Custom"; - ConflictResolutionMode["LastWriterWins"] = "LastWriterWins"; -})(exports.ConflictResolutionMode || (exports.ConflictResolutionMode = {})); +function isNonEmptyString(variable) { + return typeof variable === "string" && variable.trim().length > 0; +} -class ItemResponse extends ResourceResponse { - constructor(resource, headers, statusCode, subsstatusCode, item, diagnostics) { - super(resource, headers, statusCode, diagnostics, subsstatusCode); - this.item = item; +// Copyright (c) Microsoft Corporation. +const DefaultDiagnosticLevelValue = exports.CosmosDbDiagnosticLevel.info; +const diagnosticLevelFromEnv = (typeof process !== "undefined" && + process.env && + process.env[Constants$1.CosmosDbDiagnosticLevelEnvVarName]) || + undefined; +const acceptableDiagnosticLevelValues = Object.values(exports.CosmosDbDiagnosticLevel).map((x) => x.toString()); +let cosmosDiagnosticLevel; +if (isNonEmptyString(diagnosticLevelFromEnv)) { + // avoid calling setDiagnosticLevel because we don't want a mis-set environment variable to crash + if (isCosmosDiagnosticLevel(diagnosticLevelFromEnv)) { + setDiagnosticLevel(diagnosticLevelFromEnv); + } + else { + console.error(`${Constants$1.CosmosDbDiagnosticLevelEnvVarName} set to unknown diagnostic level '${diagnosticLevelFromEnv}'; Setting Cosmos Db diagnostic level to info. Acceptable values: ${acceptableDiagnosticLevelValues.join(", ")}.`); + } +} +function setDiagnosticLevel(level) { + if (level && !isCosmosDiagnosticLevel(level)) { + throw new Error(`Unknown diagnostic level '${level}'. Acceptable values: ${acceptableDiagnosticLevelValues.join(",")}`); } + cosmosDiagnosticLevel = level; +} +function getDiagnosticLevelFromEnvironment() { + return cosmosDiagnosticLevel; +} +function isCosmosDiagnosticLevel(diagnosticLevel) { + return acceptableDiagnosticLevelValues.includes(diagnosticLevel); +} +function determineDiagnosticLevel(diagnosticLevelFromClientConfig, diagnosticLevelFromEnvironment) { + const diagnosticLevelFromEnvOrClient = diagnosticLevelFromEnvironment !== null && diagnosticLevelFromEnvironment !== void 0 ? diagnosticLevelFromEnvironment : diagnosticLevelFromClientConfig; // Diagnostic Setting from environment gets first priority. + return diagnosticLevelFromEnvOrClient !== null && diagnosticLevelFromEnvOrClient !== void 0 ? diagnosticLevelFromEnvOrClient : DefaultDiagnosticLevelValue; // Diagnostic Setting supplied in Client config gets second priority. } +// Copyright (c) Microsoft Corporation. /** - * Used to perform operations on a specific item. 
- * - * @see {@link Items} for operations on all items; see `container.items`. + * @hidden + * This internal class implements the logic for endpoint management for geo-replicated database accounts. */ -class Item { +class GlobalEndpointManager { /** - * Returns a reference URL to the resource. Used for linking in Permissions. + * @param options - The document client instance. + * @internal */ - get url() { - return createDocumentUri(this.container.database.id, this.container.id, this.id); + constructor(options, readDatabaseAccount) { + this.readDatabaseAccount = readDatabaseAccount; + this.writeableLocations = []; + this.readableLocations = []; + this.unavailableReadableLocations = []; + this.unavailableWriteableLocations = []; + this.options = options; + this.defaultEndpoint = options.endpoint; + this.enableEndpointDiscovery = options.connectionPolicy.enableEndpointDiscovery; + this.isRefreshing = false; + this.preferredLocations = this.options.connectionPolicy.preferredLocations; + this.preferredLocationsCount = this.preferredLocations ? this.preferredLocations.length : 0; } /** - * @hidden - * @param container - The parent {@link Container}. - * @param id - The id of the given {@link Item}. - * @param partitionKey - The primary key of the given {@link Item} (only for partitioned containers). + * Gets the current read endpoint from the endpoint cache. */ - constructor(container, id, clientContext, partitionKey) { - this.container = container; - this.id = id; - this.clientContext = clientContext; - this.partitionKey = - partitionKey === undefined ? undefined : convertToInternalPartitionKey(partitionKey); + async getReadEndpoint(diagnosticNode) { + return this.resolveServiceEndpoint(diagnosticNode, exports.ResourceType.item, exports.OperationType.Read); } /** - * Read the item's definition. - * - * Any provided type, T, is not necessarily enforced by the SDK. - * You may get more or less properties and it's up to your logic to enforce it. - * If the type, T, is a class, it won't pass `typeof` comparisons, because it won't have a match prototype. - * It's recommended to only use interfaces. - * - * There is no set schema for JSON items. They may contain any number of custom properties. - * - * @param options - Additional options for the request - * - * @example Using custom type for response - * ```typescript - * interface TodoItem { - * title: string; - * done: bool; - * id: string; - * } - * - * let item: TodoItem; - * ({body: item} = await item.read()); - * ``` + * Gets the current write endpoint from the endpoint cache. 
*/ - async read(options = {}) { - return withDiagnostics(async (diagnosticNode) => { - if (this.partitionKey === undefined) { - const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); - this.partitionKey = undefinedPartitionKey(partitionKeyDefinition); - } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - let response; - try { - response = await this.clientContext.read({ - path, - resourceType: exports.ResourceType.item, - resourceId: id, - options, - partitionKey: this.partitionKey, - diagnosticNode, - }); - } - catch (error) { - if (error.code !== StatusCodes.NotFound) { - throw error; - } - response = error; - } - return new ItemResponse(response.result, response.headers, response.code, response.substatus, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + async getWriteEndpoint(diagnosticNode) { + return this.resolveServiceEndpoint(diagnosticNode, exports.ResourceType.item, exports.OperationType.Replace); } - async replace(body, options = {}) { - return withDiagnostics(async (diagnosticNode) => { - if (this.partitionKey === undefined) { - const partitionKeyResponse = await readPartitionKeyDefinition(diagnosticNode, this.container); - this.partitionKey = extractPartitionKeys(body, partitionKeyResponse); - } - const err = {}; - if (!isItemResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.replace({ - body, - path, - resourceType: exports.ResourceType.item, - resourceId: id, - options, - partitionKey: this.partitionKey, - diagnosticNode, - }); - return new ItemResponse(response.result, response.headers, response.code, response.substatus, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + async getReadEndpoints() { + return this.readableLocations.map((loc) => loc.databaseAccountEndpoint); } - /** - * Delete the item. - * - * Any provided type, T, is not necessarily enforced by the SDK. - * You may get more or less properties and it's up to your logic to enforce it. - * - * @param options - Additional options for the request - */ - async delete(options = {}) { - return withDiagnostics(async (diagnosticNode) => { - if (this.partitionKey === undefined) { - const partitionKeyResponse = await readPartitionKeyDefinition(diagnosticNode, this.container); - this.partitionKey = undefinedPartitionKey(partitionKeyResponse); - } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.item, - resourceId: id, - options, - partitionKey: this.partitionKey, - diagnosticNode, - }); - return new ItemResponse(response.result, response.headers, response.code, response.substatus, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + async getWriteEndpoints() { + return this.writeableLocations.map((loc) => loc.databaseAccountEndpoint); } - /** - * Perform a JSONPatch on the item. - * - * Any provided type, T, is not necessarily enforced by the SDK. - * You may get more or less properties and it's up to your logic to enforce it. 
- * - * @param options - Additional options for the request - */ - async patch(body, options = {}) { - return withDiagnostics(async (diagnosticNode) => { - if (this.partitionKey === undefined) { - const partitionKeyResponse = await readPartitionKeyDefinition(diagnosticNode, this.container); - this.partitionKey = extractPartitionKeys(body, partitionKeyResponse); + async markCurrentLocationUnavailableForRead(diagnosticNode, endpoint) { + await this.refreshEndpointList(diagnosticNode); + const location = this.readableLocations.find((loc) => loc.databaseAccountEndpoint === endpoint); + if (location) { + location.unavailable = true; + location.lastUnavailabilityTimestampInMs = Date.now(); + this.unavailableReadableLocations.push(location); + } + } + async markCurrentLocationUnavailableForWrite(diagnosticNode, endpoint) { + await this.refreshEndpointList(diagnosticNode); + const location = this.writeableLocations.find((loc) => loc.databaseAccountEndpoint === endpoint); + if (location) { + location.unavailable = true; + location.lastUnavailabilityTimestampInMs = Date.now(); + this.unavailableWriteableLocations.push(location); + } + } + canUseMultipleWriteLocations(resourceType, operationType) { + let canUse = this.options.connectionPolicy.useMultipleWriteLocations; + if (resourceType) { + canUse = + canUse && + (resourceType === exports.ResourceType.item || + (resourceType === exports.ResourceType.sproc && operationType === exports.OperationType.Execute)); + } + return canUse; + } + async resolveServiceEndpoint(diagnosticNode, resourceType, operationType, startServiceEndpointIndex = 0 // Represents the starting index for selecting servers. + ) { + // If endpoint discovery is disabled, always use the user provided endpoint + if (!this.options.connectionPolicy.enableEndpointDiscovery) { + diagnosticNode.addData({ readFromCache: true }, "default_endpoint"); + diagnosticNode.recordEndpointResolution(this.defaultEndpoint); + return this.defaultEndpoint; + } + // If getting the database account, always use the user provided endpoint + if (resourceType === exports.ResourceType.none) { + diagnosticNode.addData({ readFromCache: true }, "none_resource"); + diagnosticNode.recordEndpointResolution(this.defaultEndpoint); + return this.defaultEndpoint; + } + if (this.readableLocations.length === 0 || this.writeableLocations.length === 0) { + const resourceResponse = await withMetadataDiagnostics(async (metadataNode) => { + return this.readDatabaseAccount(metadataNode, { + urlConnection: this.defaultEndpoint, + }); + }, diagnosticNode, exports.MetadataLookUpType.DatabaseAccountLookUp); + this.writeableLocations = resourceResponse.resource.writableLocations; + this.readableLocations = resourceResponse.resource.readableLocations; + } + const locations = isReadRequest(operationType) + ? 
this.readableLocations + : this.writeableLocations; + let location; + // If we have preferred locations, try each one in order and use the first available one + if (this.preferredLocations && + this.preferredLocations.length > 0 && + startServiceEndpointIndex < this.preferredLocations.length) { + for (let i = startServiceEndpointIndex; i < this.preferredLocations.length; i++) { + const preferredLocation = this.preferredLocations[i]; + location = locations.find((loc) => loc.unavailable !== true && + normalizeEndpoint(loc.name) === normalizeEndpoint(preferredLocation)); + if (location) { + break; + } } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.patch({ - body, - path, - resourceType: exports.ResourceType.item, - resourceId: id, - options, - partitionKey: this.partitionKey, - diagnosticNode, + } + // If no preferred locations or one did not match, just grab the first one that is available + if (!location) { + const startIndexValid = startServiceEndpointIndex >= 0 && startServiceEndpointIndex < locations.length; + const locationsToSearch = startIndexValid + ? locations.slice(startServiceEndpointIndex) + : locations; + location = locationsToSearch.find((loc) => { + return loc.unavailable !== true; }); - return new ItemResponse(response.result, response.headers, response.code, response.substatus, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + } + location = location ? location : { name: "", databaseAccountEndpoint: this.defaultEndpoint }; + diagnosticNode.recordEndpointResolution(location.databaseAccountEndpoint); + return location.databaseAccountEndpoint; } -} - -/** - * A single response page from the Azure Cosmos DB Change Feed - */ -class ChangeFeedResponse { /** - * @internal + * Refreshes the endpoint list by clearning stale unavailability and then + * retrieving the writable and readable locations from the geo-replicated database account + * and then updating the locations cache. 
+ * We skip the refreshing if enableEndpointDiscovery is set to False */ - constructor( + async refreshEndpointList(diagnosticNode) { + if (!this.isRefreshing && this.enableEndpointDiscovery) { + this.isRefreshing = true; + const databaseAccount = await this.getDatabaseAccountFromAnyEndpoint(diagnosticNode); + if (databaseAccount) { + this.refreshStaleUnavailableLocations(); + this.refreshEndpoints(databaseAccount); + } + this.isRefreshing = false; + } + } + refreshEndpoints(databaseAccount) { + for (const location of databaseAccount.writableLocations) { + const existingLocation = this.writeableLocations.find((loc) => loc.name === location.name); + if (!existingLocation) { + this.writeableLocations.push(location); + } + } + for (const location of databaseAccount.readableLocations) { + const existingLocation = this.readableLocations.find((loc) => loc.name === location.name); + if (!existingLocation) { + this.readableLocations.push(location); + } + } + } + refreshStaleUnavailableLocations() { + const now = Date.now(); + this.updateLocation(now, this.unavailableReadableLocations, this.readableLocations); + this.unavailableReadableLocations = this.cleanUnavailableLocationList(now, this.unavailableReadableLocations); + this.updateLocation(now, this.unavailableWriteableLocations, this.writeableLocations); + this.unavailableWriteableLocations = this.cleanUnavailableLocationList(now, this.unavailableWriteableLocations); + } /** - * Gets the items returned in the response from Azure Cosmos DB + * update the locationUnavailability to undefined if the location is available again + * @param now - current time + * @param unavailableLocations - list of unavailable locations + * @param allLocations - list of all locations */ - result, + updateLocation(now, unavailableLocations, allLocations) { + for (const location of unavailableLocations) { + const unavaialableLocation = allLocations.find((loc) => loc.name === location.name); + if (unavaialableLocation && + now - unavaialableLocation.lastUnavailabilityTimestampInMs > + Constants$1.LocationUnavailableExpirationTimeInMs) { + unavaialableLocation.unavailable = false; + } + } + } + cleanUnavailableLocationList(now, unavailableLocations) { + return unavailableLocations.filter((loc) => { + if (loc && + now - loc.lastUnavailabilityTimestampInMs >= Constants$1.LocationUnavailableExpirationTimeInMs) { + return false; + } + return true; + }); + } /** - * Gets the number of items returned in the response from Azure Cosmos DB + * Gets the database account first by using the default endpoint, and if that doesn't returns + * use the endpoints for the preferred locations in the order they are specified to get + * the database account. 
*/ - count, + async getDatabaseAccountFromAnyEndpoint(diagnosticNode) { + try { + const options = { urlConnection: this.defaultEndpoint }; + const { resource: databaseAccount } = await this.readDatabaseAccount(diagnosticNode, options); + return databaseAccount; + // If for any reason(non - globaldb related), we are not able to get the database + // account from the above call to readDatabaseAccount, + // we would try to get this information from any of the preferred locations that the user + // might have specified (by creating a locational endpoint) + // and keeping eating the exception until we get the database account and return None at the end, + // if we are not able to get that info from any endpoints + } + catch (err) { + // TODO: Tracing + } + if (this.preferredLocations) { + for (const location of this.preferredLocations) { + try { + const locationalEndpoint = GlobalEndpointManager.getLocationalEndpoint(this.defaultEndpoint, location); + const options = { urlConnection: locationalEndpoint }; + const { resource: databaseAccount } = await this.readDatabaseAccount(diagnosticNode, options); + if (databaseAccount) { + return databaseAccount; + } + } + catch (err) { + // TODO: Tracing + } + } + } + } /** - * Gets the status code of the response from Azure Cosmos DB + * Gets the locational endpoint using the location name passed to it using the default endpoint. + * + * @param defaultEndpoint - The default endpoint to use for the endpoint. + * @param locationName - The location name for the azure region like "East US". */ - statusCode, headers, diagnostics) { - this.result = result; - this.count = count; - this.statusCode = statusCode; - this.diagnostics = diagnostics; - this.headers = Object.freeze(headers); + static getLocationalEndpoint(defaultEndpoint, locationName) { + // For defaultEndpoint like 'https://contoso.documents.azure.com:443/' parse it to generate URL format + // This defaultEndpoint should be global endpoint(and cannot be a locational endpoint) + // and we agreed to document that + const endpointUrl = new URL(defaultEndpoint); + // hostname attribute in endpointUrl will return 'contoso.documents.azure.com' + if (endpointUrl.hostname) { + const hostnameParts = endpointUrl.hostname.toString().toLowerCase().split("."); + if (hostnameParts) { + // globalDatabaseAccountName will return 'contoso' + const globalDatabaseAccountName = hostnameParts[0]; + // Prepare the locationalDatabaseAccountName as contoso-EastUS for location_name 'East US' + const locationalDatabaseAccountName = globalDatabaseAccountName + "-" + locationName.replace(" ", ""); + // Replace 'contoso' with 'contoso-EastUS' and + // return locationalEndpoint as https://contoso-EastUS.documents.azure.com:443/ + const locationalEndpoint = defaultEndpoint + .toLowerCase() + .replace(globalDatabaseAccountName, locationalDatabaseAccountName); + return locationalEndpoint; + } + } + return null; + } +} +function normalizeEndpoint(endpoint) { + return endpoint.split(" ").join("").toLowerCase(); +} + +// Copyright (c) Microsoft Corporation. +/** + * Provides a client-side logical representation of the Azure Cosmos DB database account. + * This client is used to configure and execute requests in the Azure Cosmos DB database service. 
+ * @example Instantiate a client and create a new database + * ```typescript + * const client = new CosmosClient({endpoint: "", auth: {masterKey: ""}}); + * await client.databases.create({id: ""}); + * ``` + * @example Instantiate a client with custom Connection Policy + * ```typescript + * const connectionPolicy = new ConnectionPolicy(); + * connectionPolicy.RequestTimeout = 10000; + * const client = new CosmosClient({ + * endpoint: "", + * auth: {masterKey: ""}, + * connectionPolicy + * }); + * ``` + */ +class CosmosClient { + constructor(optionsOrConnectionString) { + var _a, _b; + if (typeof optionsOrConnectionString === "string") { + optionsOrConnectionString = parseConnectionString(optionsOrConnectionString); + } + const endpoint = checkURL(optionsOrConnectionString.endpoint); + if (!endpoint) { + throw new Error("Invalid endpoint specified"); + } + const clientConfig = this.initializeClientConfigDiagnostic(optionsOrConnectionString); + optionsOrConnectionString.connectionPolicy = Object.assign({}, defaultConnectionPolicy, optionsOrConnectionString.connectionPolicy); + optionsOrConnectionString.defaultHeaders = optionsOrConnectionString.defaultHeaders || {}; + optionsOrConnectionString.defaultHeaders[Constants$1.HttpHeaders.CacheControl] = "no-cache"; + optionsOrConnectionString.defaultHeaders[Constants$1.HttpHeaders.Version] = + Constants$1.CurrentVersion; + if (optionsOrConnectionString.consistencyLevel !== undefined) { + optionsOrConnectionString.defaultHeaders[Constants$1.HttpHeaders.ConsistencyLevel] = + optionsOrConnectionString.consistencyLevel; + } + optionsOrConnectionString.defaultHeaders[Constants$1.HttpHeaders.UserAgent] = getUserAgent(optionsOrConnectionString.userAgentSuffix); + const globalEndpointManager = new GlobalEndpointManager(optionsOrConnectionString, async (diagnosticNode, opts) => this.getDatabaseAccountInternal(diagnosticNode, opts)); + this.clientContext = new ClientContext(optionsOrConnectionString, globalEndpointManager, clientConfig, determineDiagnosticLevel(optionsOrConnectionString.diagnosticLevel, getDiagnosticLevelFromEnvironment())); + if (((_a = optionsOrConnectionString.connectionPolicy) === null || _a === void 0 ? void 0 : _a.enableEndpointDiscovery) && + ((_b = optionsOrConnectionString.connectionPolicy) === null || _b === void 0 ? 
void 0 : _b.enableBackgroundEndpointRefreshing)) { + this.backgroundRefreshEndpointList(globalEndpointManager, optionsOrConnectionString.connectionPolicy.endpointRefreshRateInMs || + defaultConnectionPolicy.endpointRefreshRateInMs); + } + this.databases = new Databases(this, this.clientContext); + this.offers = new Offers(this, this.clientContext); + } + initializeClientConfigDiagnostic(optionsOrConnectionString) { + return { + endpoint: optionsOrConnectionString.endpoint, + resourceTokensConfigured: optionsOrConnectionString.resourceTokens !== undefined, + tokenProviderConfigured: optionsOrConnectionString.tokenProvider !== undefined, + aadCredentialsConfigured: optionsOrConnectionString.aadCredentials !== undefined, + connectionPolicyConfigured: optionsOrConnectionString.connectionPolicy !== undefined, + consistencyLevel: optionsOrConnectionString.consistencyLevel, + defaultHeaders: optionsOrConnectionString.defaultHeaders, + agentConfigured: optionsOrConnectionString.agent !== undefined, + userAgentSuffix: optionsOrConnectionString.userAgentSuffix, + diagnosticLevel: optionsOrConnectionString.diagnosticLevel, + pluginsConfigured: optionsOrConnectionString.plugins !== undefined, + sDKVersion: Constants$1.SDKVersion, + }; } /** - * Gets the request charge for this request from the Azure Cosmos DB service. + * Get information about the current {@link DatabaseAccount} (including which regions are supported, etc.) */ - get requestCharge() { - const rus = this.headers[Constants$1.HttpHeaders.RequestCharge]; - return rus ? parseInt(rus, 10) : null; + async getDatabaseAccount(options) { + return withDiagnostics(async (diagnosticNode) => { + return this.getDatabaseAccountInternal(diagnosticNode, options); + }, this.clientContext); } /** - * Gets the activity ID for the request from the Azure Cosmos DB service. + * @hidden */ - get activityId() { - return this.headers[Constants$1.HttpHeaders.ActivityId]; + async getDatabaseAccountInternal(diagnosticNode, options) { + const response = await this.clientContext.getDatabaseAccount(diagnosticNode, options); + return new ResourceResponse(response.result, response.headers, response.code, getEmptyCosmosDiagnostics(), response.substatus); } /** - * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service. + * Gets the currently used write endpoint url. Useful for troubleshooting purposes. * - * This is equivalent to the `etag` property. + * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints. */ - get continuation() { - return this.etag; + async getWriteEndpoint() { + return withDiagnostics(async (diagnosticNode) => { + return this.clientContext.getWriteEndpoint(diagnosticNode); + }, this.clientContext); } /** - * Gets the session token for use in session consistency reads from the Azure Cosmos DB service. + * Gets the currently used read endpoint. Useful for troubleshooting purposes. + * + * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints. 
*/ - get sessionToken() { - return this.headers[Constants$1.HttpHeaders.SessionToken]; + async getReadEndpoint() { + return withDiagnostics(async (diagnosticNode) => { + return this.clientContext.getReadEndpoint(diagnosticNode); + }, this.clientContext); } /** - * Gets the entity tag associated with last transaction in the Azure Cosmos DB service, - * which can be used as If-Non-Match Access condition for ReadFeed REST request or - * `continuation` property of `ChangeFeedOptions` parameter for - * `Items.changeFeed()` - * to get feed changes since the transaction specified by this entity tag. + * Gets the known write endpoints. Useful for troubleshooting purposes. * - * This is equivalent to the `continuation` property. + * The urls may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints. */ - get etag() { - return this.headers[Constants$1.HttpHeaders.ETag]; + getWriteEndpoints() { + return this.clientContext.getWriteEndpoints(); } -} - -/** - * Provides iterator for change feed. - * - * Use `Items.changeFeed()` to get an instance of the iterator. - */ -class ChangeFeedIterator { /** - * @internal + * Gets the currently used read endpoint. Useful for troubleshooting purposes. + * + * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints. */ - constructor(clientContext, resourceId, resourceLink, partitionKey, changeFeedOptions) { - this.clientContext = clientContext; - this.resourceId = resourceId; - this.resourceLink = resourceLink; - this.partitionKey = partitionKey; - this.changeFeedOptions = changeFeedOptions; - // partition key XOR partition key range id - const partitionKeyValid = partitionKey !== undefined; - this.isPartitionSpecified = partitionKeyValid; - let canUseStartFromBeginning = true; - if (changeFeedOptions.continuation) { - this.nextIfNoneMatch = changeFeedOptions.continuation; - canUseStartFromBeginning = false; - } - if (changeFeedOptions.startTime) { - // .toUTCString() is platform specific, but most platforms use RFC 1123. - // In ECMAScript 2018, this was standardized to RFC 1123. - // See for more info: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString - this.ifModifiedSince = changeFeedOptions.startTime.toUTCString(); - canUseStartFromBeginning = false; - } - if (canUseStartFromBeginning && !changeFeedOptions.startFromBeginning) { - this.nextIfNoneMatch = ChangeFeedIterator.IfNoneMatchAllHeaderValue; - } + getReadEndpoints() { + return this.clientContext.getReadEndpoints(); } /** - * Gets a value indicating whether there are potentially additional results that can be retrieved. + * Used for reading, updating, or deleting a existing database by id or accessing containers belonging to that database. * - * Initially returns true. This value is set based on whether the last execution returned a continuation token. + * This does not make a network call. Use `.read` to get info about the database after getting the {@link Database} object. * - * @returns Boolean value representing if whether there are potentially additional results that can be retrieved. + * @param id - The id of the database. 
+ * @example Create a new container off of an existing database + * ```typescript + * const container = client.database("").containers.create(""); + * ``` + * + * @example Delete an existing database + * ```typescript + * await client.database("").delete(); + * ``` */ - get hasMoreResults() { - return this.lastStatusCode !== StatusCodes.NotModified; + database(id) { + return new Database(this, id, this.clientContext); } /** - * Gets an async iterator which will yield pages of results from Azure Cosmos DB. + * Used for reading, or updating a existing offer by id. + * @param id - The id of the offer. */ - getAsyncIterator() { - return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() { - do { - const result = yield tslib.__await(this.fetchNext()); - if (result.count > 0) { - yield yield tslib.__await(result); - } - } while (this.hasMoreResults); - }); + offer(id) { + return new Offer(this, id, this.clientContext); } /** - * Read feed and retrieves the next page of results in Azure Cosmos DB. + * Clears background endpoint refresher. Use client.dispose() when destroying the CosmosClient within another process. */ - async fetchNext() { - return withDiagnostics(async (diagnosticNode) => { - const response = await this.getFeedResponse(diagnosticNode); - this.lastStatusCode = response.statusCode; - this.nextIfNoneMatch = response.headers[Constants$1.HttpHeaders.ETag]; - return response; - }, this.clientContext); + dispose() { + clearTimeout(this.endpointRefresher); } - async getFeedResponse(diagnosticNode) { - if (!this.isPartitionSpecified) { - throw new Error("Container is partitioned, but no partition key or partition key range id was specified."); - } - const feedOptions = { initialHeaders: {}, useIncrementalFeed: true }; - if (typeof this.changeFeedOptions.maxItemCount === "number") { - feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount; - } - if (this.changeFeedOptions.sessionToken) { - feedOptions.sessionToken = this.changeFeedOptions.sessionToken; - } - if (this.nextIfNoneMatch) { - feedOptions.accessCondition = { - type: Constants$1.HttpHeaders.IfNoneMatch, - condition: this.nextIfNoneMatch, - }; - } - if (this.ifModifiedSince) { - feedOptions.initialHeaders[Constants$1.HttpHeaders.IfModifiedSince] = this.ifModifiedSince; + async backgroundRefreshEndpointList(globalEndpointManager, refreshRate) { + this.endpointRefresher = setInterval(() => { + try { + return withDiagnostics(async (diagnosticNode) => { + return globalEndpointManager.refreshEndpointList(diagnosticNode); + }, this.clientContext, exports.DiagnosticNodeType.BACKGROUND_REFRESH_THREAD); + } + catch (e) { + console.warn("Failed to refresh endpoints", e); + } + }, refreshRate); + if (this.endpointRefresher.unref && typeof this.endpointRefresher.unref === "function") { + this.endpointRefresher.unref(); } - const response = await this.clientContext.queryFeed({ - path: this.resourceLink, - resourceType: exports.ResourceType.item, - resourceId: this.resourceId, - resultFn: (result) => (result ? result.Documents : []), - query: undefined, - options: feedOptions, - partitionKey: this.partitionKey, - diagnosticNode: diagnosticNode, - }); // TODO: some funky issues with query feed. Probably need to change it up. - return new ChangeFeedResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers, getEmptyCosmosDiagnostics()); } } -ChangeFeedIterator.IfNoneMatchAllHeaderValue = "*"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-const BytePrefix = { - Undefined: "00", - Null: "01", - False: "02", - True: "03", - MinNumber: "04", - Number: "05", - MaxNumber: "06", - MinString: "07", - String: "08", - MaxString: "09", - Int64: "0a", - Int32: "0b", - Int16: "0c", - Int8: "0d", - Uint64: "0e", - Uint32: "0f", - Uint16: "10", - Uint8: "11", - Binary: "12", - Guid: "13", - Float: "14", - Infinity: "FF", -}; +class SasTokenProperties { +} // Copyright (c) Microsoft Corporation. -function writeNumberForBinaryEncodingJSBI(hash) { - let payload = encodeNumberAsUInt64JSBI(hash); - let outputStream = Buffer.from(BytePrefix.Number, "hex"); - const firstChunk = JSBI__default["default"].asUintN(64, JSBI__default["default"].signedRightShift(payload, JSBI__default["default"].BigInt(56))); - outputStream = Buffer.concat([outputStream, Buffer.from(firstChunk.toString(16), "hex")]); - payload = JSBI__default["default"].asUintN(64, JSBI__default["default"].leftShift(JSBI__default["default"].BigInt(payload), JSBI__default["default"].BigInt(0x8))); - let byteToWrite = JSBI__default["default"].BigInt(0); - let shifted; - let padded; - do { - { - // we pad because after shifting because we will produce characters like "f" or similar, - // which cannot be encoded as hex in a buffer because they are invalid hex - // https://github.com/nodejs/node/issues/24491 - padded = byteToWrite.toString(16).padStart(2, "0"); - if (padded !== "00") { - outputStream = Buffer.concat([outputStream, Buffer.from(padded, "hex")]); - } - } - shifted = JSBI__default["default"].asUintN(64, JSBI__default["default"].signedRightShift(payload, JSBI__default["default"].BigInt(56))); - byteToWrite = JSBI__default["default"].asUintN(64, JSBI__default["default"].bitwiseOr(shifted, JSBI__default["default"].BigInt(0x01))); - payload = JSBI__default["default"].asUintN(64, JSBI__default["default"].leftShift(payload, JSBI__default["default"].BigInt(7))); - } while (JSBI__default["default"].notEqual(payload, JSBI__default["default"].BigInt(0))); - const lastChunk = JSBI__default["default"].asUintN(64, JSBI__default["default"].bitwiseAnd(byteToWrite, JSBI__default["default"].BigInt(0xfe))); - // we pad because after shifting because we will produce characters like "f" or similar, - // which cannot be encoded as hex in a buffer because they are invalid hex - // https://github.com/nodejs/node/issues/24491 - padded = lastChunk.toString(16).padStart(2, "0"); - if (padded !== "00") { - outputStream = Buffer.concat([outputStream, Buffer.from(padded, "hex")]); - } - return outputStream; -} -function encodeNumberAsUInt64JSBI(value) { - const rawValueBits = getRawBitsJSBI(value); - const mask = JSBI__default["default"].BigInt(0x8000000000000000); - const returned = rawValueBits < mask - ? JSBI__default["default"].bitwiseXor(rawValueBits, mask) - : JSBI__default["default"].add(JSBI__default["default"].bitwiseNot(rawValueBits), JSBI__default["default"].BigInt(1)); - return returned; -} -function doubleToByteArrayJSBI(double) { - const output = Buffer.alloc(8); - const lng = getRawBitsJSBI(double); - for (let i = 0; i < 8; i++) { - output[i] = JSBI__default["default"].toNumber(JSBI__default["default"].bitwiseAnd(JSBI__default["default"].signedRightShift(lng, JSBI__default["default"].multiply(JSBI__default["default"].BigInt(i), JSBI__default["default"].BigInt(8))), JSBI__default["default"].BigInt(0xff))); +// Licensed under the MIT license. 
+/// +function encodeUTF8(str) { + const bytes = new Uint8Array(str.length); + for (let i = 0; i < str.length; i++) { + bytes[i] = str.charCodeAt(i); } - return output; -} -function getRawBitsJSBI(value) { - const view = new DataView(new ArrayBuffer(8)); - view.setFloat64(0, value); - return JSBI__default["default"].BigInt(`0x${buf2hex(view.buffer)}`); -} -function buf2hex(buffer) { - return Array.prototype.map - .call(new Uint8Array(buffer), (x) => ("00" + x.toString(16)).slice(-2)) - .join(""); + return bytes; } -// +----------------------------------------------------------------------+ -// | murmurHash3js.js v3.0.1 // https://github.com/pid/murmurHash3js -// | A javascript implementation of MurmurHash3's x86 hashing algorithms. | -// |----------------------------------------------------------------------| -// | Copyright (c) 2012-2015 Karan Lyons | -// | https://github.com/karanlyons/murmurHash3.js/blob/c1778f75792abef7bdd74bc85d2d4e1a3d25cfe9/murmurHash3.js | -// | Freely distributable under the MIT license. | -// +----------------------------------------------------------------------+ -// PRIVATE FUNCTIONS -// ----------------- -function _x86Multiply(m, n) { - // - // Given two 32bit ints, returns the two multiplied together as a - // 32bit int. - // - return (m & 0xffff) * n + ((((m >>> 16) * n) & 0xffff) << 16); -} -function _x86Rotl(m, n) { - // - // Given a 32bit int and an int representing a number of bit positions, - // returns the 32bit int rotated left by that number of positions. - // - return (m << n) | (m >>> (32 - n)); -} -function _x86Fmix(h) { - // - // Given a block, returns murmurHash3's final x86 mix of that block. - // - h ^= h >>> 16; - h = _x86Multiply(h, 0x85ebca6b); - h ^= h >>> 13; - h = _x86Multiply(h, 0xc2b2ae35); - h ^= h >>> 16; - return h; -} -function _x64Add(m, n) { - // - // Given two 64bit ints (as an array of two 32bit ints) returns the two - // added together as a 64bit int (as an array of two 32bit ints). - // - m = [m[0] >>> 16, m[0] & 0xffff, m[1] >>> 16, m[1] & 0xffff]; - n = [n[0] >>> 16, n[0] & 0xffff, n[1] >>> 16, n[1] & 0xffff]; - const o = [0, 0, 0, 0]; - o[3] += m[3] + n[3]; - o[2] += o[3] >>> 16; - o[3] &= 0xffff; - o[2] += m[2] + n[2]; - o[1] += o[2] >>> 16; - o[2] &= 0xffff; - o[1] += m[1] + n[1]; - o[0] += o[1] >>> 16; - o[1] &= 0xffff; - o[0] += m[0] + n[0]; - o[0] &= 0xffff; - return [(o[0] << 16) | o[1], (o[2] << 16) | o[3]]; -} -function _x64Multiply(m, n) { - // - // Given two 64bit ints (as an array of two 32bit ints) returns the two - // multiplied together as a 64bit int (as an array of two 32bit ints). - // - m = [m[0] >>> 16, m[0] & 0xffff, m[1] >>> 16, m[1] & 0xffff]; - n = [n[0] >>> 16, n[0] & 0xffff, n[1] >>> 16, n[1] & 0xffff]; - const o = [0, 0, 0, 0]; - o[3] += m[3] * n[3]; - o[2] += o[3] >>> 16; - o[3] &= 0xffff; - o[2] += m[2] * n[3]; - o[1] += o[2] >>> 16; - o[2] &= 0xffff; - o[2] += m[3] * n[2]; - o[1] += o[2] >>> 16; - o[2] &= 0xffff; - o[1] += m[1] * n[3]; - o[0] += o[1] >>> 16; - o[1] &= 0xffff; - o[1] += m[2] * n[2]; - o[0] += o[1] >>> 16; - o[1] &= 0xffff; - o[1] += m[3] * n[1]; - o[0] += o[1] >>> 16; - o[1] &= 0xffff; - o[0] += m[0] * n[3] + m[1] * n[2] + m[2] * n[1] + m[3] * n[0]; - o[0] &= 0xffff; - return [(o[0] << 16) | o[1], (o[2] << 16) | o[3]]; -} -function _x64Rotl(m, n) { - // - // Given a 64bit int (as an array of two 32bit ints) and an int - // representing a number of bit positions, returns the 64bit int (as an - // array of two 32bit ints) rotated left by that number of positions. 
- // - n %= 64; - if (n === 32) { - return [m[1], m[0]]; - } - else if (n < 32) { - return [(m[0] << n) | (m[1] >>> (32 - n)), (m[1] << n) | (m[0] >>> (32 - n))]; +// Copyright (c) Microsoft Corporation. +/** + * Experimental internal only + * Generates the payload representing the permission configuration for the sas token. + */ +async function createAuthorizationSasToken(masterKey, sasTokenProperties) { + let resourcePrefixPath = ""; + if (typeof sasTokenProperties.databaseName === "string" && + sasTokenProperties.databaseName !== "") { + resourcePrefixPath += `/${Constants$1.Path.DatabasesPathSegment}/${sasTokenProperties.databaseName}`; } - else { - n -= 32; - return [(m[1] << n) | (m[0] >>> (32 - n)), (m[0] << n) | (m[1] >>> (32 - n))]; + if (typeof sasTokenProperties.containerName === "string" && + sasTokenProperties.containerName !== "") { + if (sasTokenProperties.databaseName === "") { + throw new Error(`illegalArgumentException : ${sasTokenProperties.databaseName} \ + is an invalid database name`); + } + resourcePrefixPath += `/${Constants$1.Path.CollectionsPathSegment}/${sasTokenProperties.containerName}`; } -} -function _x64LeftShift(m, n) { - // - // Given a 64bit int (as an array of two 32bit ints) and an int - // representing a number of bit positions, returns the 64bit int (as an - // array of two 32bit ints) shifted left by that number of positions. - // - n %= 64; - if (n === 0) { - return m; + if (typeof sasTokenProperties.resourceName === "string" && + sasTokenProperties.resourceName !== "") { + if (sasTokenProperties.containerName === "") { + throw new Error(`illegalArgumentException : ${sasTokenProperties.containerName} \ + is an invalid container name`); + } + switch (sasTokenProperties.resourceKind) { + case "ITEM": + resourcePrefixPath += `${Constants$1.Path.Root}${Constants$1.Path.DocumentsPathSegment}`; + break; + case "STORED_PROCEDURE": + resourcePrefixPath += `${Constants$1.Path.Root}${Constants$1.Path.StoredProceduresPathSegment}`; + break; + case "USER_DEFINED_FUNCTION": + resourcePrefixPath += `${Constants$1.Path.Root}${Constants$1.Path.UserDefinedFunctionsPathSegment}`; + break; + case "TRIGGER": + resourcePrefixPath += `${Constants$1.Path.Root}${Constants$1.Path.TriggersPathSegment}`; + break; + default: + throw new Error(`illegalArgumentException : ${sasTokenProperties.resourceKind} \ + is an invalid resource kind`); + } + resourcePrefixPath += `${Constants$1.Path.Root}${sasTokenProperties.resourceName}${Constants$1.Path.Root}`; } - else if (n < 32) { - return [(m[0] << n) | (m[1] >>> (32 - n)), m[1] << n]; + sasTokenProperties.resourcePath = resourcePrefixPath.toString(); + let partitionRanges = ""; + if (sasTokenProperties.partitionKeyValueRanges !== undefined && + sasTokenProperties.partitionKeyValueRanges.length > 0) { + if (typeof sasTokenProperties.resourceKind !== "string" && + sasTokenProperties.resourceKind !== "ITEM") { + throw new Error(`illegalArgumentException : ${sasTokenProperties.resourceKind} \ + is an invalid partition key value range`); + } + sasTokenProperties.partitionKeyValueRanges.forEach((range) => { + partitionRanges += `${encodeUTF8(range)},`; + }); } - else { - return [m[1] << (n - 32), 0]; + if (sasTokenProperties.controlPlaneReaderScope === 0) { + sasTokenProperties.controlPlaneReaderScope += exports.SasTokenPermissionKind.ContainerReadAny; + sasTokenProperties.controlPlaneWriterScope += exports.SasTokenPermissionKind.ContainerReadAny; } -} -function _x64Xor(m, n) { - // - // Given two 64bit ints (as an array of two 32bit 
ints) returns the two - // xored together as a 64bit int (as an array of two 32bit ints). - // - return [m[0] ^ n[0], m[1] ^ n[1]]; -} -function _x64Fmix(h) { - // - // Given a block, returns murmurHash3's final x64 mix of that block. - // (`[0, h[0] >>> 1]` is a 33 bit unsigned right shift. This is the - // only place where we need to right shift 64bit ints.) - // - h = _x64Xor(h, [0, h[0] >>> 1]); - h = _x64Multiply(h, [0xff51afd7, 0xed558ccd]); - h = _x64Xor(h, [0, h[0] >>> 1]); - h = _x64Multiply(h, [0xc4ceb9fe, 0x1a85ec53]); - h = _x64Xor(h, [0, h[0] >>> 1]); - return h; -} -// PUBLIC FUNCTIONS -// ---------------- -function x86Hash32(bytes, seed) { - // - // Given a string and an optional seed as an int, returns a 32 bit hash - // using the x86 flavor of MurmurHash3, as an unsigned int. - // - seed = seed || 0; - const remainder = bytes.length % 4; - const blocks = bytes.length - remainder; - let h1 = seed; - let k1 = 0; - const c1 = 0xcc9e2d51; - const c2 = 0x1b873593; - let j = 0; - for (let i = 0; i < blocks; i = i + 4) { - k1 = bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24); - k1 = _x86Multiply(k1, c1); - k1 = _x86Rotl(k1, 15); - k1 = _x86Multiply(k1, c2); - h1 ^= k1; - h1 = _x86Rotl(h1, 13); - h1 = _x86Multiply(h1, 5) + 0xe6546b64; - j = i + 4; + if (sasTokenProperties.dataPlaneReaderScope === 0 && + sasTokenProperties.dataPlaneWriterScope === 0) { + sasTokenProperties.dataPlaneReaderScope = exports.SasTokenPermissionKind.ContainerFullAccess; + sasTokenProperties.dataPlaneWriterScope = exports.SasTokenPermissionKind.ContainerFullAccess; } - k1 = 0; - switch (remainder) { - case 3: - k1 ^= bytes[j + 2] << 16; - case 2: - k1 ^= bytes[j + 1] << 8; - case 1: - k1 ^= bytes[j]; - k1 = _x86Multiply(k1, c1); - k1 = _x86Rotl(k1, 15); - k1 = _x86Multiply(k1, c2); - h1 ^= k1; + if (typeof sasTokenProperties.keyType !== "number" || + typeof sasTokenProperties.keyType === undefined) { + switch (sasTokenProperties.keyType) { + case CosmosKeyType.PrimaryMaster: + sasTokenProperties.keyType = 1; + break; + case CosmosKeyType.SecondaryMaster: + sasTokenProperties.keyType = 2; + break; + case CosmosKeyType.PrimaryReadOnly: + sasTokenProperties.keyType = 3; + break; + case CosmosKeyType.SecondaryReadOnly: + sasTokenProperties.keyType = 4; + break; + default: + throw new Error(`illegalArgumentException : ${sasTokenProperties.keyType} \ + is an invalid key type`); + } } - h1 ^= bytes.length; - h1 = _x86Fmix(h1); - return h1 >>> 0; + const payload = sasTokenProperties.user + + "\n" + + sasTokenProperties.userTag + + "\n" + + sasTokenProperties.resourcePath + + "\n" + + partitionRanges + + "\n" + + utcsecondsSinceEpoch(sasTokenProperties.startTime).toString(16) + + "\n" + + utcsecondsSinceEpoch(sasTokenProperties.expiryTime).toString(16) + + "\n" + + sasTokenProperties.keyType + + "\n" + + sasTokenProperties.controlPlaneReaderScope.toString(16) + + "\n" + + sasTokenProperties.controlPlaneWriterScope.toString(16) + + "\n" + + sasTokenProperties.dataPlaneReaderScope.toString(16) + + "\n" + + sasTokenProperties.dataPlaneWriterScope.toString(16) + + "\n"; + const signedPayload = await hmac(masterKey, Buffer.from(payload).toString("base64")); + return "type=sas&ver=1.0&sig=" + signedPayload + ";" + Buffer.from(payload).toString("base64"); } -function x86Hash128(bytes, seed) { - // - // Given a string and an optional seed as an int, returns a 128 bit - // hash using the x86 flavor of MurmurHash3, as an unsigned hex. 
- // - seed = seed || 0; - const remainder = bytes.length % 16; - const blocks = bytes.length - remainder; - let h1 = seed; - let h2 = seed; - let h3 = seed; - let h4 = seed; - let k1 = 0; - let k2 = 0; - let k3 = 0; - let k4 = 0; - const c1 = 0x239b961b; - const c2 = 0xab0e9789; - const c3 = 0x38b34ae5; - const c4 = 0xa1e38b93; - let j = 0; - for (let i = 0; i < blocks; i = i + 16) { - k1 = bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24); - k2 = bytes[i + 4] | (bytes[i + 5] << 8) | (bytes[i + 6] << 16) | (bytes[i + 7] << 24); - k3 = bytes[i + 8] | (bytes[i + 9] << 8) | (bytes[i + 10] << 16) | (bytes[i + 11] << 24); - k4 = bytes[i + 12] | (bytes[i + 13] << 8) | (bytes[i + 14] << 16) | (bytes[i + 15] << 24); - k1 = _x86Multiply(k1, c1); - k1 = _x86Rotl(k1, 15); - k1 = _x86Multiply(k1, c2); - h1 ^= k1; - h1 = _x86Rotl(h1, 19); - h1 += h2; - h1 = _x86Multiply(h1, 5) + 0x561ccd1b; - k2 = _x86Multiply(k2, c2); - k2 = _x86Rotl(k2, 16); - k2 = _x86Multiply(k2, c3); - h2 ^= k2; - h2 = _x86Rotl(h2, 17); - h2 += h3; - h2 = _x86Multiply(h2, 5) + 0x0bcaa747; - k3 = _x86Multiply(k3, c3); - k3 = _x86Rotl(k3, 17); - k3 = _x86Multiply(k3, c4); - h3 ^= k3; - h3 = _x86Rotl(h3, 15); - h3 += h4; - h3 = _x86Multiply(h3, 5) + 0x96cd1c35; - k4 = _x86Multiply(k4, c4); - k4 = _x86Rotl(k4, 18); - k4 = _x86Multiply(k4, c1); - h4 ^= k4; - h4 = _x86Rotl(h4, 13); - h4 += h1; - h4 = _x86Multiply(h4, 5) + 0x32ac3b17; - j = i + 16; - } - k1 = 0; - k2 = 0; - k3 = 0; - k4 = 0; - switch (remainder) { - case 15: - k4 ^= bytes[j + 14] << 16; - case 14: - k4 ^= bytes[j + 13] << 8; - case 13: - k4 ^= bytes[j + 12]; - k4 = _x86Multiply(k4, c4); - k4 = _x86Rotl(k4, 18); - k4 = _x86Multiply(k4, c1); - h4 ^= k4; - case 12: - k3 ^= bytes[j + 11] << 24; - case 11: - k3 ^= bytes[j + 10] << 16; - case 10: - k3 ^= bytes[j + 9] << 8; - case 9: - k3 ^= bytes[j + 8]; - k3 = _x86Multiply(k3, c3); - k3 = _x86Rotl(k3, 17); - k3 = _x86Multiply(k3, c4); - h3 ^= k3; - case 8: - k2 ^= bytes[j + 7] << 24; - case 7: - k2 ^= bytes[j + 6] << 16; - case 6: - k2 ^= bytes[j + 5] << 8; - case 5: - k2 ^= bytes[j + 4]; - k2 = _x86Multiply(k2, c2); - k2 = _x86Rotl(k2, 16); - k2 = _x86Multiply(k2, c3); - h2 ^= k2; - case 4: - k1 ^= bytes[j + 3] << 24; - case 3: - k1 ^= bytes[j + 2] << 16; - case 2: - k1 ^= bytes[j + 1] << 8; - case 1: - k1 ^= bytes[j]; - k1 = _x86Multiply(k1, c1); - k1 = _x86Rotl(k1, 15); - k1 = _x86Multiply(k1, c2); - h1 ^= k1; - } - h1 ^= bytes.length; - h2 ^= bytes.length; - h3 ^= bytes.length; - h4 ^= bytes.length; - h1 += h2; - h1 += h3; - h1 += h4; - h2 += h1; - h3 += h1; - h4 += h1; - h1 = _x86Fmix(h1); - h2 = _x86Fmix(h2); - h3 = _x86Fmix(h3); - h4 = _x86Fmix(h4); - h1 += h2; - h1 += h3; - h1 += h4; - h2 += h1; - h3 += h1; - h4 += h1; - return (("00000000" + (h1 >>> 0).toString(16)).slice(-8) + - ("00000000" + (h2 >>> 0).toString(16)).slice(-8) + - ("00000000" + (h3 >>> 0).toString(16)).slice(-8) + - ("00000000" + (h4 >>> 0).toString(16)).slice(-8)); +/** + * @hidden + */ +// TODO: utcMilllisecondsSinceEpoch +function utcsecondsSinceEpoch(date) { + return Math.round(date.getTime() / 1000); } -function x64Hash128(bytes, seed) { - // - // Given a string and an optional seed as an int, returns a 128 bit - // hash using the x64 flavor of MurmurHash3, as an unsigned hex. 
- // - seed = seed || 0; - const remainder = bytes.length % 16; - const blocks = bytes.length - remainder; - let h1 = [0, seed]; - let h2 = [0, seed]; - let k1 = [0, 0]; - let k2 = [0, 0]; - const c1 = [0x87c37b91, 0x114253d5]; - const c2 = [0x4cf5ad43, 0x2745937f]; - let j = 0; - for (let i = 0; i < blocks; i = i + 16) { - k1 = [ - bytes[i + 4] | (bytes[i + 5] << 8) | (bytes[i + 6] << 16) | (bytes[i + 7] << 24), - bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24), - ]; - k2 = [ - bytes[i + 12] | (bytes[i + 13] << 8) | (bytes[i + 14] << 16) | (bytes[i + 15] << 24), - bytes[i + 8] | (bytes[i + 9] << 8) | (bytes[i + 10] << 16) | (bytes[i + 11] << 24), - ]; - k1 = _x64Multiply(k1, c1); - k1 = _x64Rotl(k1, 31); - k1 = _x64Multiply(k1, c2); - h1 = _x64Xor(h1, k1); - h1 = _x64Rotl(h1, 27); - h1 = _x64Add(h1, h2); - h1 = _x64Add(_x64Multiply(h1, [0, 5]), [0, 0x52dce729]); - k2 = _x64Multiply(k2, c2); - k2 = _x64Rotl(k2, 33); - k2 = _x64Multiply(k2, c1); - h2 = _x64Xor(h2, k2); - h2 = _x64Rotl(h2, 31); - h2 = _x64Add(h2, h1); - h2 = _x64Add(_x64Multiply(h2, [0, 5]), [0, 0x38495ab5]); - j = i + 16; - } - k1 = [0, 0]; - k2 = [0, 0]; - switch (remainder) { - case 15: - k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 14]], 48)); - case 14: - k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 13]], 40)); - case 13: - k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 12]], 32)); - case 12: - k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 11]], 24)); - case 11: - k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 10]], 16)); - case 10: - k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 9]], 8)); - case 9: - k2 = _x64Xor(k2, [0, bytes[j + 8]]); - k2 = _x64Multiply(k2, c2); - k2 = _x64Rotl(k2, 33); - k2 = _x64Multiply(k2, c1); - h2 = _x64Xor(h2, k2); - case 8: - k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 7]], 56)); - case 7: - k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 6]], 48)); - case 6: - k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 5]], 40)); - case 5: - k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 4]], 32)); - case 4: - k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 3]], 24)); - case 3: - k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 2]], 16)); - case 2: - k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 1]], 8)); - case 1: - k1 = _x64Xor(k1, [0, bytes[j]]); - k1 = _x64Multiply(k1, c1); - k1 = _x64Rotl(k1, 31); - k1 = _x64Multiply(k1, c2); - h1 = _x64Xor(h1, k1); - } - h1 = _x64Xor(h1, [0, bytes.length]); - h2 = _x64Xor(h2, [0, bytes.length]); - h1 = _x64Add(h1, h2); - h2 = _x64Add(h2, h1); - h1 = _x64Fmix(h1); - h2 = _x64Fmix(h2); - h1 = _x64Add(h1, h2); - h2 = _x64Add(h2, h1); - // Here we reverse h1 and h2 in Cosmos - // This is an implementation detail and not part of the public spec - const h1Buff = Buffer.from(("00000000" + (h1[0] >>> 0).toString(16)).slice(-8) + - ("00000000" + (h1[1] >>> 0).toString(16)).slice(-8), "hex"); - const h1Reversed = reverse$1(h1Buff).toString("hex"); - const h2Buff = Buffer.from(("00000000" + (h2[0] >>> 0).toString(16)).slice(-8) + - ("00000000" + (h2[1] >>> 0).toString(16)).slice(-8), "hex"); - const h2Reversed = reverse$1(h2Buff).toString("hex"); - return h1Reversed + h2Reversed; -} -function reverse$1(buff) { - const buffer = Buffer.allocUnsafe(buff.length); - for (let i = 0, j = buff.length - 1; i <= j; ++i, --j) { - buffer[i] = buff[j]; - buffer[j] = buff[i]; - } - return buffer; -} -var MurmurHash = { - version: "3.0.0", - x86: { - hash32: x86Hash32, - hash128: x86Hash128, - }, - x64: { - hash128: x64Hash128, - }, - inputValidation: true, -}; -// Copyright (c) 
Microsoft Corporation. -function hashV2PartitionKey(partitionKey) { - const toHash = Buffer.concat(partitionKey.map(prefixKeyByType$1)); - const hash = MurmurHash.x64.hash128(toHash); - const reverseBuff = reverse(Buffer.from(hash, "hex")); - reverseBuff[0] &= 0x3f; - return reverseBuff.toString("hex").toUpperCase(); -} -function prefixKeyByType$1(key) { - let bytes; - switch (typeof key) { - case "string": { - bytes = Buffer.concat([ - Buffer.from(BytePrefix.String, "hex"), - Buffer.from(key), - Buffer.from(BytePrefix.Infinity, "hex"), - ]); - return bytes; - } - case "number": { - const numberBytes = doubleToByteArrayJSBI(key); - bytes = Buffer.concat([Buffer.from(BytePrefix.Number, "hex"), numberBytes]); - return bytes; - } - case "boolean": { - const prefix = key ? BytePrefix.True : BytePrefix.False; - return Buffer.from(prefix, "hex"); - } - case "object": { - if (key === null) { - return Buffer.from(BytePrefix.Null, "hex"); - } - return Buffer.from(BytePrefix.Undefined, "hex"); - } - case "undefined": { - return Buffer.from(BytePrefix.Undefined, "hex"); - } - default: - throw new Error(`Unexpected type: ${typeof key}`); - } -} -function reverse(buff) { - const buffer = Buffer.allocUnsafe(buff.length); - for (let i = 0, j = buff.length - 1; i <= j; ++i, --j) { - buffer[i] = buff[j]; - buffer[j] = buff[i]; - } - return buffer; -} +Object.defineProperty(exports, "RestError", ({ + enumerable: true, + get: function () { return coreRestPipeline.RestError; } +})); +Object.defineProperty(exports, "AbortError", ({ + enumerable: true, + get: function () { return abortController.AbortError; } +})); +exports.BulkOperationType = BulkOperationType; +exports.ChangeFeedIterator = ChangeFeedIterator; +exports.ChangeFeedIteratorResponse = ChangeFeedIteratorResponse; +exports.ChangeFeedResponse = ChangeFeedResponse; +exports.ChangeFeedStartFrom = ChangeFeedStartFrom; +exports.ClientContext = ClientContext; +exports.ClientSideMetrics = ClientSideMetrics; +exports.Conflict = Conflict; +exports.ConflictResponse = ConflictResponse; +exports.Conflicts = Conflicts; +exports.Constants = Constants$1; +exports.Container = Container; +exports.ContainerResponse = ContainerResponse; +exports.Containers = Containers; +exports.CosmosClient = CosmosClient; +exports.CosmosDiagnostics = CosmosDiagnostics; +exports.DEFAULT_PARTITION_KEY_PATH = DEFAULT_PARTITION_KEY_PATH; +exports.Database = Database; +exports.DatabaseAccount = DatabaseAccount; +exports.DatabaseResponse = DatabaseResponse; +exports.Databases = Databases; +exports.DiagnosticNodeInternal = DiagnosticNodeInternal; +exports.ErrorResponse = ErrorResponse; +exports.FeedRange = FeedRange; +exports.FeedResponse = FeedResponse; +exports.GlobalEndpointManager = GlobalEndpointManager; +exports.Item = Item; +exports.ItemResponse = ItemResponse; +exports.Items = Items; +exports.Offer = Offer; +exports.OfferResponse = OfferResponse; +exports.Offers = Offers; +exports.PartitionKeyBuilder = PartitionKeyBuilder; +exports.PatchOperationType = PatchOperationType; +exports.Permission = Permission; +exports.PermissionResponse = PermissionResponse; +exports.Permissions = Permissions; +exports.QueryIterator = QueryIterator; +exports.QueryMetrics = QueryMetrics; +exports.QueryMetricsConstants = QueryMetricsConstants; +exports.QueryPreparationTimes = QueryPreparationTimes; +exports.ResourceResponse = ResourceResponse; +exports.RuntimeExecutionTimes = RuntimeExecutionTimes; +exports.SasTokenProperties = SasTokenProperties; +exports.Scripts = Scripts; +exports.StatusCodes = 
StatusCodes; +exports.StoredProcedure = StoredProcedure; +exports.StoredProcedureResponse = StoredProcedureResponse; +exports.StoredProcedures = StoredProcedures; +exports.TimeSpan = TimeSpan; +exports.TimeoutError = TimeoutError; +exports.Trigger = Trigger; +exports.TriggerResponse = TriggerResponse; +exports.Triggers = Triggers; +exports.User = User; +exports.UserDefinedFunction = UserDefinedFunction; +exports.UserDefinedFunctionResponse = UserDefinedFunctionResponse; +exports.UserDefinedFunctions = UserDefinedFunctions; +exports.UserResponse = UserResponse; +exports.Users = Users; +exports.createAuthorizationSasToken = createAuthorizationSasToken; +exports.setAuthorizationTokenHeaderUsingMasterKey = setAuthorizationTokenHeaderUsingMasterKey; +//# sourceMappingURL=index.js.map -/** - * Generate Hash for a `Multi Hash` type partition. - * @param partitionKey - to be hashed. - * @returns - */ -function hashMultiHashPartitionKey(partitionKey) { - return partitionKey.map((keys) => hashV2PartitionKey([keys])).join(""); -} -// Copyright (c) Microsoft Corporation. -function writeStringForBinaryEncoding(payload) { - let outputStream = Buffer.from(BytePrefix.String, "hex"); - const MAX_STRING_BYTES_TO_APPEND = 100; - const byteArray = [...Buffer.from(payload)]; - const isShortString = payload.length <= MAX_STRING_BYTES_TO_APPEND; - for (let index = 0; index < (isShortString ? byteArray.length : MAX_STRING_BYTES_TO_APPEND + 1); index++) { - let charByte = byteArray[index]; - if (charByte < 0xff) { - charByte++; - } - outputStream = Buffer.concat([outputStream, Buffer.from(charByte.toString(16), "hex")]); - } - if (isShortString) { - outputStream = Buffer.concat([outputStream, Buffer.from(BytePrefix.Undefined, "hex")]); - } - return outputStream; -} +/***/ }), -// Copyright (c) Microsoft Corporation. 
-const MAX_STRING_CHARS = 100; -function hashV1PartitionKey(partitionKey) { - const key = partitionKey[0]; - const toHash = prefixKeyByType(key); - const hash = MurmurHash.x86.hash32(toHash); - const encodedJSBI = writeNumberForBinaryEncodingJSBI(hash); - const encodedValue = encodeByType(key); - const finalHash = Buffer.concat([encodedJSBI, encodedValue]).toString("hex").toUpperCase(); - return finalHash; +/***/ 50378: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(59054)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(67261)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(59921)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(40447)); + +var _nil = _interopRequireDefault(__nccwpck_require__(18018)); + +var _version = _interopRequireDefault(__nccwpck_require__(54389)); + +var _validate = _interopRequireDefault(__nccwpck_require__(73054)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(59704)); + +var _parse = _interopRequireDefault(__nccwpck_require__(72350)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 70411: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); } -function prefixKeyByType(key) { - let bytes; - switch (typeof key) { - case "string": { - const truncated = key.substr(0, MAX_STRING_CHARS); - bytes = Buffer.concat([ - Buffer.from(BytePrefix.String, "hex"), - Buffer.from(truncated), - Buffer.from(BytePrefix.Undefined, "hex"), - ]); - return bytes; - } - case "number": { - const numberBytes = doubleToByteArrayJSBI(key); - bytes = Buffer.concat([Buffer.from(BytePrefix.Number, "hex"), numberBytes]); - return bytes; - } - case "boolean": { - const prefix = key ? 
BytePrefix.True : BytePrefix.False; - return Buffer.from(prefix, "hex"); - } - case "object": { - if (key === null) { - return Buffer.from(BytePrefix.Null, "hex"); - } - return Buffer.from(BytePrefix.Undefined, "hex"); - } - case "undefined": { - return Buffer.from(BytePrefix.Undefined, "hex"); - } - default: - throw new Error(`Unexpected type: ${typeof key}`); - } + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 18018: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 72350: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(73054)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; } -function encodeByType(key) { - switch (typeof key) { - case "string": { - const truncated = key.substr(0, MAX_STRING_CHARS); - return writeStringForBinaryEncoding(truncated); - } - case "number": { - const encodedJSBI = writeNumberForBinaryEncodingJSBI(key); - return encodedJSBI; - } - case "boolean": { - const prefix = key ? 
BytePrefix.True : BytePrefix.False; - return Buffer.from(prefix, "hex"); - } - case "object": - if (key === null) { - return Buffer.from(BytePrefix.Null, "hex"); - } - return Buffer.from(BytePrefix.Undefined, "hex"); - case "undefined": - return Buffer.from(BytePrefix.Undefined, "hex"); - default: - throw new Error(`Unexpected type: ${typeof key}`); - } + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 44976: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 97165: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); } -// Copyright (c) Microsoft Corporation. -/** - * Generate hash of a PartitonKey based on it PartitionKeyDefinition. - * @param partitionKey - to be hashed. - * @param partitionDefinition - container's partitionKey definition - * @returns - */ -function hashPartitionKey(partitionKey, partitionDefinition) { - const kind = (partitionDefinition === null || partitionDefinition === void 0 ? void 0 : partitionDefinition.kind) || exports.PartitionKeyKind.Hash; // Default value. - const isV2 = partitionDefinition && - partitionDefinition.version && - partitionDefinition.version === exports.PartitionKeyDefinitionVersion.V2; - switch (kind) { - case exports.PartitionKeyKind.Hash: - return isV2 ? hashV2PartitionKey(partitionKey) : hashV1PartitionKey(partitionKey); - case exports.PartitionKeyKind.MultiHash: - return hashMultiHashPartitionKey(partitionKey); - } +/***/ }), + +/***/ 19850: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 59704: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(73054)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + /** - * @internal - * FeedRange for which change feed is being requested. 
+ * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ -class ChangeFeedRange { - constructor(minInclusive, maxExclusive, continuationToken, epkMinHeader, epkMaxHeader) { - this.minInclusive = minInclusive; - this.maxExclusive = maxExclusive; - this.continuationToken = continuationToken; - this.epkMinHeader = epkMinHeader; - this.epkMaxHeader = epkMaxHeader; - } +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); } -/** - * A single response page from the Azure Cosmos DB Change Feed - */ -class ChangeFeedIteratorResponse { - /** - * @internal - */ - constructor( - /** - * Gets the items returned in the response from Azure Cosmos DB - */ - result, - /** - * Gets the number of items returned in the response from Azure Cosmos DB - */ - count, - /** - * Gets the status code of the response from Azure Cosmos DB - */ - statusCode, - /** - * Headers related to cosmos DB and change feed. - */ - headers, - /** - * Cosmos Diagnostic Object. - */ - diagnostics, - /** - * Gets the subStatusCodes of the response from Azure Cosmos DB. Useful in partition split or partition gone. - */ - subStatusCode) { - this.result = result; - this.count = count; - this.statusCode = statusCode; - this.diagnostics = diagnostics; - this.subStatusCode = subStatusCode; - this.headers = headers; - } - /** - * Gets the request charge for this request from the Azure Cosmos DB service. - */ - get requestCharge() { - const rus = this.headers[Constants$1.HttpHeaders.RequestCharge]; - return rus ? parseInt(rus, 10) : null; - } - /** - * Gets the activity ID for the request from the Azure Cosmos DB service. - */ - get activityId() { - return this.headers[Constants$1.HttpHeaders.ActivityId]; - } - /** - * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service. - */ - get continuationToken() { - return this.headers[Constants$1.HttpHeaders.ContinuationToken]; - } - /** - * Gets the session token for use in session consistency reads from the Azure Cosmos DB service. - */ - get sessionToken() { - return this.headers[Constants$1.HttpHeaders.SessionToken]; - } +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; } -// Copyright (c) Microsoft Corporation. -/** - * @hidden - * A queue for iterating over specified Epk ranges and fetch change feed for the given epk ranges. 
- */ -class FeedRangeQueue { - constructor() { - this.elements = []; - } - modifyFirstElement(newItem) { - if (!this.isEmpty()) { - this.elements[0] = newItem; - } - } - enqueue(item) { - this.elements.push(item); - } - dequeue() { - return this.elements.shift(); - } - peek() { - return !this.isEmpty() ? this.elements[0] : undefined; - } - isEmpty() { - return this.elements.length === 0; - } - moveFirstElementToTheEnd() { - if (!this.isEmpty()) { - this.elements.push(this.dequeue()); - } +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 59054: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(97165)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(59704)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; } - /** - * Returns a snapshot of the queue as an array to be used as Continuation token. - */ - returnSnapshot() { - const allFeedRanges = []; - this.elements.map((element) => { - const minInclusive = element.epkMinHeader ? element.epkMinHeader : element.minInclusive; - const maxExclusive = element.epkMaxHeader ? element.epkMaxHeader : element.maxExclusive; - const feedRangeElement = new ChangeFeedRange(minInclusive, maxExclusive, element.continuationToken); - allFeedRanges.push(feedRangeElement); - }); - return allFeedRanges; + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); } -/** - * Continuation token for change feed of entire container, or a specific Epk Range. - * @internal - */ -class CompositeContinuationToken { - constructor(rid, Continuation) { - this.rid = rid; - this.Continuation = Continuation; - } +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 67261: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(99740)); + +var _md = _interopRequireDefault(__nccwpck_require__(70411)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 99740: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(59704)); + +var _parse = _interopRequireDefault(__nccwpck_require__(72350)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; } -/** - * @hidden - * Class which specifies the ChangeFeedIterator to start reading changes from beginning of time. 
- */ -class ChangeFeedStartFromBeginning { - constructor(cfResource) { - this.cfResource = cfResource; - } - getCfResource() { - return this.cfResource; +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); } -} -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * @hidden - * Class which specifies the ChangeFeedIterator to start reading changes from this moment in time. - */ -class ChangeFeedStartFromNow { - constructor(cfResource) { - this.cfResource = cfResource; + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); } - getCfResource() { - return this.cfResource; + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; } -/** - * @hidden - * Class which specifies the ChangeFeedIterator to start reading changes from a particular point of time. - */ -class ChangeFeedStartFromTime { - constructor(startTime, cfResource) { - this.startTime = startTime; - this.cfResource = cfResource; - } - getCfResource() { - return this.cfResource; - } - getStartTime() { - return this.startTime; +/***/ }), + +/***/ 59921: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(97165)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(59704)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; } + + return buf; + } + + return (0, _stringify.default)(rnds); } -// Copyright (c) Microsoft Corporation. -/** - * Specifies a feed range for the changefeed. 
- */ -class FeedRange { - /** - * @internal - */ - constructor(minInclusive, maxExclusive) { - // only way to explictly block users from creating FeedRange directly in JS - if (new.target === FeedRange) { - throw new ErrorResponse("Cannot instantiate abstract class FeedRange"); - } - this.minInclusive = minInclusive; - this.maxExclusive = maxExclusive; +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 40447: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(99740)); + +var _sha = _interopRequireDefault(__nccwpck_require__(19850)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 73054: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(44976)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 54389: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(73054)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 3084: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var msalCommon = __nccwpck_require__(1985); +var logger$o = __nccwpck_require__(3233); +var abortController = __nccwpck_require__(52557); +var coreUtil = __nccwpck_require__(51333); +var coreClient = __nccwpck_require__(29729); +var coreRestPipeline = __nccwpck_require__(88121); +var coreTracing = __nccwpck_require__(19363); +var fs = __nccwpck_require__(57147); +var os = __nccwpck_require__(22037); +var path = __nccwpck_require__(71017); +var promises = __nccwpck_require__(73292); +var https = __nccwpck_require__(95687); +var child_process = __nccwpck_require__(32081); +var crypto = __nccwpck_require__(6113); +var util = __nccwpck_require__(73837); +var open = __nccwpck_require__(85768); + +function _interopNamespaceDefault(e) { + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? 
d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); } + n.default = e; + return Object.freeze(n); +} + +var msalCommon__namespace = /*#__PURE__*/_interopNamespaceDefault(msalCommon); +var child_process__namespace = /*#__PURE__*/_interopNamespaceDefault(child_process); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function isErrorResponse(errorResponse) { + return (errorResponse && + typeof errorResponse.error === "string" && + typeof errorResponse.error_description === "string"); } /** - * @hidden - * Specifies a feed range for the changefeed. + * The Error.name value of an CredentialUnavailable */ -class FeedRangeInternal extends FeedRange { - /* eslint-disable @typescript-eslint/no-useless-constructor */ - constructor(minInclusive, maxExclusive) { - super(minInclusive, maxExclusive); +const CredentialUnavailableErrorName = "CredentialUnavailableError"; +/** + * This signifies that the credential that was tried in a chained credential + * was not available to be used as the credential. Rather than treating this as + * an error that should halt the chain, it's caught and the chain continues + */ +class CredentialUnavailableError extends Error { + constructor(message) { + super(message); + this.name = CredentialUnavailableErrorName; } } - /** - * @hidden - * Validates the change feed options passed by the user + * The Error.name value of an AuthenticationError */ -function validateChangeFeedIteratorOptions(options) { - if (!isChangeFeedIteratorOptions(options)) { - throw new ErrorResponse("Invalid Changefeed Iterator Options."); - } - if ((options === null || options === void 0 ? void 0 : options.maxItemCount) && typeof (options === null || options === void 0 ? void 0 : options.maxItemCount) !== "number") { - throw new ErrorResponse("maxItemCount must be number"); - } - if ((options === null || options === void 0 ? void 0 : options.maxItemCount) !== undefined && (options === null || options === void 0 ? void 0 : options.maxItemCount) < 1) { - throw new ErrorResponse("maxItemCount must be a positive number"); +const AuthenticationErrorName = "AuthenticationError"; +/** + * Provides details about a failure to authenticate with Azure Active + * Directory. The `errorResponse` field contains more details about + * the specific failure. + */ +class AuthenticationError extends Error { + // eslint-disable-next-line @typescript-eslint/ban-types + constructor(statusCode, errorBody) { + let errorResponse = { + error: "unknown", + errorDescription: "An unknown error occurred and no additional details are available.", + }; + if (isErrorResponse(errorBody)) { + errorResponse = convertOAuthErrorResponseToErrorResponse(errorBody); + } + else if (typeof errorBody === "string") { + try { + // Most error responses will contain JSON-formatted error details + // in the response body + const oauthErrorResponse = JSON.parse(errorBody); + errorResponse = convertOAuthErrorResponseToErrorResponse(oauthErrorResponse); + } + catch (e) { + if (statusCode === 400) { + errorResponse = { + error: "authority_not_found", + errorDescription: "The specified authority URL was not found.", + }; + } + else { + errorResponse = { + error: "unknown_error", + errorDescription: `An unknown error has occurred. 
Response body:\n\n${errorBody}`, + }; + } + } + } + else { + errorResponse = { + error: "unknown_error", + errorDescription: "An unknown error occurred and no additional details are available.", + }; + } + super(`${errorResponse.error} Status code: ${statusCode}\nMore details:\n${errorResponse.errorDescription}`); + this.statusCode = statusCode; + this.errorResponse = errorResponse; + // Ensure that this type reports the correct name + this.name = AuthenticationErrorName; } } -function isChangeFeedIteratorOptions(options) { - if (typeof options !== "object") { - return false; - } - if (Object.keys(options).length === 0 && JSON.stringify(options) === "{}") { - return true; +/** + * The Error.name value of an AggregateAuthenticationError + */ +const AggregateAuthenticationErrorName = "AggregateAuthenticationError"; +/** + * Provides an `errors` array containing {@link AuthenticationError} instance + * for authentication failures from credentials in a {@link ChainedTokenCredential}. + */ +class AggregateAuthenticationError extends Error { + constructor(errors, errorMessage) { + const errorDetail = errors.join("\n"); + super(`${errorMessage}\n${errorDetail}`); + this.errors = errors; + // Ensure that this type reports the correct name + this.name = AggregateAuthenticationErrorName; } - return options && !(isPrimitivePartitionKeyValue(options) || Array.isArray(options)); +} +function convertOAuthErrorResponseToErrorResponse(errorBody) { + return { + error: errorBody.error, + errorDescription: errorBody.error_description, + correlationId: errorBody.correlation_id, + errorCodes: errorBody.error_codes, + timestamp: errorBody.timestamp, + traceId: errorBody.trace_id, + }; } /** - * @hidden - * Checks if pkRange entirely covers the given overLapping range or there is only partial overlap. - * - * If no complete overlap, exact range which overlaps is retured which is used to set minEpk and maxEpk headers while quering change feed. + * Error used to enforce authentication after trying to retrieve a token silently. */ -async function extractOverlappingRanges(epkRange, overLappingRange) { - if (overLappingRange.minInclusive >= epkRange.min && - overLappingRange.maxExclusive <= epkRange.max) { - return [undefined, undefined]; - } - else if (overLappingRange.minInclusive <= epkRange.min && - overLappingRange.maxExclusive >= epkRange.max) { - return [epkRange.min, epkRange.max]; - } - // Right Side of overlapping range is covered - else if (overLappingRange.minInclusive <= epkRange.min && - overLappingRange.maxExclusive <= epkRange.max && - overLappingRange.maxExclusive >= epkRange.min) { - return [epkRange.min, overLappingRange.maxExclusive]; - } - // Left Side of overlapping range is covered - else { - return [overLappingRange.minInclusive, epkRange.max]; +class AuthenticationRequiredError extends Error { + constructor( + /** + * Optional parameters. A message can be specified. The {@link GetTokenOptions} of the request can also be specified to more easily associate the error with the received parameters. + */ + options) { + super(options.message); + this.scopes = options.scopes; + this.getTokenOptions = options.getTokenOptions; + this.name = "AuthenticationRequiredError"; } } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
/** - * @hidden - * Checks if the object is a valid EpkRange + * The AzureLogger used for all clients within the identity package */ -function isEpkRange(obj) { - return (obj instanceof FeedRangeInternal && - typeof obj.minInclusive === "string" && - typeof obj.maxExclusive === "string" && - obj.minInclusive >= - Constants$1.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey && - obj.maxExclusive <= - Constants$1.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey && - obj.maxExclusive > obj.minInclusive); +const logger$n = logger$o.createClientLogger("identity"); +/** + * Separates a list of environment variable names into a plain object with two arrays: an array of missing environment variables and another array with assigned environment variables. + * @param supportedEnvVars - List of environment variable names + */ +function processEnvVars(supportedEnvVars) { + return supportedEnvVars.reduce((acc, envVariable) => { + if (process.env[envVariable]) { + acc.assigned.push(envVariable); + } + else { + acc.missing.push(envVariable); + } + return acc; + }, { missing: [], assigned: [] }); } /** - * @hidden + * Formatting the success event on the credentials */ -function buildInternalChangeFeedOptions(options, continuationToken, startTime) { - const internalCfOptions = {}; - internalCfOptions.maxItemCount = options === null || options === void 0 ? void 0 : options.maxItemCount; - internalCfOptions.sessionToken = options === null || options === void 0 ? void 0 : options.sessionToken; - internalCfOptions.continuationToken = continuationToken; - // Default option of changefeed is to start from now. - internalCfOptions.startTime = startTime; - return internalCfOptions; +function formatSuccess(scope) { + return `SUCCESS. Scopes: ${Array.isArray(scope) ? scope.join(", ") : scope}.`; } /** - * @hidden + * Formatting the success event on the credentials */ -function fetchStartTime(changeFeedStartFrom) { - if (changeFeedStartFrom instanceof ChangeFeedStartFromBeginning) { - return undefined; +function formatError(scope, error) { + let message = "ERROR."; + if (scope === null || scope === void 0 ? void 0 : scope.length) { + message += ` Scopes: ${Array.isArray(scope) ? scope.join(", ") : scope}.`; } - else if (changeFeedStartFrom instanceof ChangeFeedStartFromNow) { - return new Date(); + return `${message} Error message: ${typeof error === "string" ? error : error.message}.`; +} +/** + * Generates a CredentialLoggerInstance. + * + * It logs with the format: + * + * `[title] => [message]` + * + */ +function credentialLoggerInstance(title, parent, log = logger$n) { + const fullTitle = parent ? `${parent.fullTitle} ${title}` : title; + function info(message) { + log.info(`${fullTitle} =>`, message); } - else if (changeFeedStartFrom instanceof ChangeFeedStartFromTime) { - return changeFeedStartFrom.getStartTime(); + function warning(message) { + log.warning(`${fullTitle} =>`, message); + } + function verbose(message) { + log.verbose(`${fullTitle} =>`, message); } + return { + title, + fullTitle, + info, + warning, + verbose, + }; } /** - * @hidden + * Generates a CredentialLogger, which is a logger declared at the credential's constructor, and used at any point in the credential. + * It has all the properties of a CredentialLoggerInstance, plus other logger instances, one per method. 
+ * + * It logs with the format: + * + * `[title] => [message]` + * `[title] => getToken() => [message]` + * */ -function isNullOrEmpty(text) { - return text === null || text === undefined || text.trim() === ""; +function credentialLogger(title, log = logger$n) { + const credLogger = credentialLoggerInstance(title, undefined, log); + return Object.assign(Object.assign({}, credLogger), { parent: log, getToken: credentialLoggerInstance("=> getToken()", credLogger, log) }); } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * @hidden - * Provides iterator for change feed for entire container or an epk range. - * - * Use `Items.getChangeFeedIterator()` to get an instance of the iterator. + * Current version of the `@azure/identity` package. */ -class ChangeFeedForEpkRange { - /** - * @internal - */ - constructor(clientContext, container, partitionKeyRangeCache, resourceId, resourceLink, url, changeFeedOptions, epkRange) { - this.clientContext = clientContext; - this.container = container; - this.partitionKeyRangeCache = partitionKeyRangeCache; - this.resourceId = resourceId; - this.resourceLink = resourceLink; - this.url = url; - this.changeFeedOptions = changeFeedOptions; - this.epkRange = epkRange; - this.generateContinuationToken = () => { - return JSON.stringify(new CompositeContinuationToken(this.rId, this.queue.returnSnapshot())); - }; - this.queue = new FeedRangeQueue(); - this.continuationToken = changeFeedOptions.continuationToken - ? JSON.parse(changeFeedOptions.continuationToken) - : undefined; - this.startTime = changeFeedOptions.startTime - ? changeFeedOptions.startTime.toUTCString() - : undefined; - this.isInstantiated = false; - } - async setIteratorRid(diagnosticNode) { - const { resource } = await this.container.readInternal(diagnosticNode); - this.rId = resource._rid; - } - continuationTokenRidMatchContainerRid() { - if (this.continuationToken.rid !== this.rId) { - return false; - } - return true; - } - async fillChangeFeedQueue(diagnosticNode) { - if (this.continuationToken) { - // fill the queue with feed ranges in continuation token. - await this.fetchContinuationTokenFeedRanges(diagnosticNode); - } - else { - // fill the queue with feed ranges overlapping the given epk range. - await this.fetchOverLappingFeedRanges(diagnosticNode); - } - this.isInstantiated = true; - } +const SDK_VERSION = `4.0.0`; +/** + * The default client ID for authentication + * @internal + */ +// TODO: temporary - this is the Azure CLI clientID - we'll replace it when +// Developer Sign On application is available +// https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/identity/Azure.Identity/src/Constants.cs#L9 +const DeveloperSignOnClientId = "04b07795-8ddb-461a-bbee-02f9e1bf7b46"; +/** + * The default tenant for authentication + * @internal + */ +const DefaultTenantId = "common"; +/** + * A list of known Azure authority hosts + */ +exports.AzureAuthorityHosts = void 0; +(function (AzureAuthorityHosts) { /** - * Fill the queue with the feed ranges overlapping with the given epk range. 
+ * China-based Azure Authority Host */ - async fetchOverLappingFeedRanges(diagnosticNode) { - try { - const overLappingRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, this.epkRange, diagnosticNode); - for (const overLappingRange of overLappingRanges) { - const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(this.epkRange, overLappingRange); - const feedRange = new ChangeFeedRange(overLappingRange.minInclusive, overLappingRange.maxExclusive, "", epkMinHeader, epkMaxHeader); - this.queue.enqueue(feedRange); - } - } - catch (err) { - throw new ErrorResponse(err.message); - } - } + AzureAuthorityHosts["AzureChina"] = "https://login.chinacloudapi.cn"; /** - * Fill the queue with feed ranges from continuation token + * Germany-based Azure Authority Host */ - async fetchContinuationTokenFeedRanges(diagnosticNode) { - const contToken = this.continuationToken; - if (!this.continuationTokenRidMatchContainerRid()) { - throw new ErrorResponse("The continuation token is not for the current container definition"); - } - else { - for (const cToken of contToken.Continuation) { - const queryRange = new QueryRange(cToken.minInclusive, cToken.maxExclusive, true, false); - try { - const overLappingRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, queryRange, diagnosticNode); - for (const overLappingRange of overLappingRanges) { - // check if the epk range present in continuation token entirely covers the overlapping range. - // If yes, minInclusive and maxExclusive of the overlapping range will be set. - // If no, i.e. there is only partial overlap, epkMinHeader and epkMaxHeader are set as min and max of overlap. - // This will be used when we make a call to fetch change feed. - const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(queryRange, overLappingRange); - const feedRange = new ChangeFeedRange(overLappingRange.minInclusive, overLappingRange.maxExclusive, cToken.continuationToken, epkMinHeader, epkMaxHeader); - this.queue.enqueue(feedRange); - } - } - catch (err) { - throw new ErrorResponse(err.message); - } - } - } - } + AzureAuthorityHosts["AzureGermany"] = "https://login.microsoftonline.de"; /** - * Change feed is an infinite feed. hasMoreResults is always true. + * US Government Azure Authority Host */ - get hasMoreResults() { - return true; - } + AzureAuthorityHosts["AzureGovernment"] = "https://login.microsoftonline.us"; /** - * Gets an async iterator which will yield change feed results. + * Public Cloud Azure Authority Host */ - getAsyncIterator() { - return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() { - do { - const result = yield tslib.__await(this.readNext()); - yield yield tslib.__await(result); - } while (this.hasMoreResults); + AzureAuthorityHosts["AzurePublicCloud"] = "https://login.microsoftonline.com"; +})(exports.AzureAuthorityHosts || (exports.AzureAuthorityHosts = {})); +/** + * The default authority host. + */ +const DefaultAuthorityHost = exports.AzureAuthorityHosts.AzurePublicCloud; +/** + * Allow acquiring tokens for any tenant for multi-tentant auth. + */ +const ALL_TENANTS = ["*"]; +const CACHE_CAE_SUFFIX = ".cae"; +const CACHE_NON_CAE_SUFFIX = ".nocae"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+/** + * Latest AuthenticationRecord version + * @internal + */ +const LatestAuthenticationRecordVersion = "1.0"; +/** + * Ensures the validity of the MSAL token + * @internal + */ +function ensureValidMsalToken(scopes, logger, msalToken, getTokenOptions) { + const error = (message) => { + logger.getToken.info(message); + return new AuthenticationRequiredError({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + getTokenOptions, + message, }); + }; + if (!msalToken) { + throw error("No response"); } - /** - * Gets an async iterator which will yield pages of results from Azure Cosmos DB. - * - * Keeps iterating over the feedranges and checks if any feed range has new result. Keeps note of the last feed range which returned non 304 result. - * - * When same feed range is reached and no new changes are found, a 304 (not Modified) is returned to the end user. Then starts process all over again. - */ - async readNext() { - return withDiagnostics(async (diagnosticNode) => { - // validate if the internal queue is filled up with feed ranges. - if (!this.isInstantiated) { - await this.setIteratorRid(diagnosticNode); - await this.fillChangeFeedQueue(diagnosticNode); - } - // stores the last feedRange for which statusCode is not 304 i.e. there were new changes in that feed range. - let firstNotModifiedFeedRange = undefined; - let result; - do { - const [processedFeedRange, response] = await this.fetchNext(diagnosticNode); - result = response; - if (result !== undefined) { - { - if (firstNotModifiedFeedRange === undefined) { - firstNotModifiedFeedRange = processedFeedRange; - } - // move current feed range to end of queue to fetch result of next feed range. - // This is done to fetch changes in breadth first manner and avoid starvation. - this.queue.moveFirstElementToTheEnd(); - // check if there are new results for the given feed range. - if (result.statusCode === StatusCodes.Ok) { - result.headers[Constants$1.HttpHeaders.ContinuationToken] = - this.generateContinuationToken(); - return result; - } - } - } - } while (!this.checkedAllFeedRanges(firstNotModifiedFeedRange)); - // set the continuation token after processing. - result.headers[Constants$1.HttpHeaders.ContinuationToken] = this.generateContinuationToken(); - return result; - }, this.clientContext); - } - /** - * Read feed and retrieves the next page of results in Azure Cosmos DB. - */ - async fetchNext(diagnosticNode) { - const feedRange = this.queue.peek(); - if (feedRange) { - // fetch results for feed range at the beginning of the queue. - const result = await this.getFeedResponse(feedRange, diagnosticNode); - // check if results need to be fetched again depending on status code returned. - // Eg. in case of paritionSplit, results need to be fetched for the child partitions. - const shouldRetry = await this.shouldRetryOnFailure(feedRange, result, diagnosticNode); - if (shouldRetry) { - this.queue.dequeue(); - return this.fetchNext(diagnosticNode); - } - else { - // update the continuation value for the current feed range. 
- const continuationValueForFeedRange = result.headers[Constants$1.HttpHeaders.ETag]; - const newFeedRange = this.queue.peek(); - newFeedRange.continuationToken = continuationValueForFeedRange; - return [[newFeedRange.minInclusive, newFeedRange.maxExclusive], result]; - } - } - else { - return [[undefined, undefined], undefined]; - } + if (!msalToken.expiresOn) { + throw error(`Response had no "expiresOn" property.`); } - checkedAllFeedRanges(firstNotModifiedFeedRange) { - if (firstNotModifiedFeedRange === undefined) { - return false; - } - const feedRangeQueueFirstElement = this.queue.peek(); - return (firstNotModifiedFeedRange[0] === (feedRangeQueueFirstElement === null || feedRangeQueueFirstElement === void 0 ? void 0 : feedRangeQueueFirstElement.minInclusive) && - firstNotModifiedFeedRange[1] === (feedRangeQueueFirstElement === null || feedRangeQueueFirstElement === void 0 ? void 0 : feedRangeQueueFirstElement.maxExclusive)); + if (!msalToken.accessToken) { + throw error(`Response had no "accessToken" property.`); } - /** - * Checks whether the current EpkRange is split into multiple ranges or not. - * - * If yes, it force refreshes the partitionKeyRange cache and enqueue children epk ranges. - */ - async shouldRetryOnFailure(feedRange, response, diagnosticNode) { - if (response.statusCode === StatusCodes.Ok || response.statusCode === StatusCodes.NotModified) { - return false; - } - const partitionSplit = response.statusCode === StatusCodes.Gone && - (response.subStatusCode === SubStatusCodes.PartitionKeyRangeGone || - response.subStatusCode === SubStatusCodes.CompletingSplit); - if (partitionSplit) { - const queryRange = new QueryRange(feedRange.minInclusive, feedRange.maxExclusive, true, false); - const resolvedRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, queryRange, diagnosticNode, true); - if (resolvedRanges.length < 1) { - throw new ErrorResponse("Partition split/merge detected but no overlapping ranges found."); - } - // This covers both cases of merge and split. - // resolvedRanges.length > 1 in case of split. - // resolvedRanges.length === 1 in case of merge. EpkRange headers will be added in this case. - if (resolvedRanges.length >= 1) { - await this.handleSplit(false, resolvedRanges, queryRange, feedRange.continuationToken); - } - return true; - } - return false; +} +/** + * Generates a valid authority by combining a host with a tenantId. + * @internal + */ +function getAuthority(tenantId, host) { + if (!host) { + host = DefaultAuthorityHost; } - /* - * Enqueues all the children feed ranges for the given feed range. - */ - async handleSplit(shiftLeft, resolvedRanges, oldFeedRange, continuationToken) { - let flag = 0; - if (shiftLeft) { - // This section is only applicable when handleSplit is called by getPartitionRangeId(). - // used only when existing partition key range cache is used to check for any overlapping ranges. - // Modifies the first element with the first overlapping range. - const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(oldFeedRange, resolvedRanges[0]); - const newFeedRange = new ChangeFeedRange(resolvedRanges[0].minInclusive, resolvedRanges[0].maxExclusive, continuationToken, epkMinHeader, epkMaxHeader); - this.queue.modifyFirstElement(newFeedRange); - flag = 1; - } - // Enqueue the overlapping ranges. 
- for (let i = flag; i < resolvedRanges.length; i++) { - const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(oldFeedRange, resolvedRanges[i]); - const newFeedRange = new ChangeFeedRange(resolvedRanges[i].minInclusive, resolvedRanges[i].maxExclusive, continuationToken, epkMinHeader, epkMaxHeader); - this.queue.enqueue(newFeedRange); - } + if (new RegExp(`${tenantId}/?$`).test(host)) { + return host; } - /** - * Fetch the partitionKeyRangeId for the given feed range. - * - * This partitionKeyRangeId is passed to queryFeed to fetch the results. - */ - async getPartitionRangeId(feedRange, diagnosticNode) { - const min = feedRange.epkMinHeader ? feedRange.epkMinHeader : feedRange.minInclusive; - const max = feedRange.epkMaxHeader ? feedRange.epkMaxHeader : feedRange.maxExclusive; - const queryRange = new QueryRange(min, max, true, false); - const resolvedRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, queryRange, diagnosticNode, false); - if (resolvedRanges.length < 1) { - throw new ErrorResponse("No overlapping ranges found."); - } - const firstResolvedRange = resolvedRanges[0]; - if (resolvedRanges.length > 1) { - await this.handleSplit(true, resolvedRanges, queryRange, feedRange.continuationToken); - } - return firstResolvedRange.id; + if (host.endsWith("/")) { + return host + tenantId; } - async getFeedResponse(feedRange, diagnosticNode) { - const feedOptions = { initialHeaders: {}, useIncrementalFeed: true }; - if (typeof this.changeFeedOptions.maxItemCount === "number") { - feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount; - } - if (this.changeFeedOptions.sessionToken) { - feedOptions.sessionToken = this.changeFeedOptions.sessionToken; - } - if (feedRange.continuationToken) { - feedOptions.accessCondition = { - type: Constants$1.HttpHeaders.IfNoneMatch, - condition: feedRange.continuationToken, - }; - } - if (this.startTime) { - feedOptions.initialHeaders[Constants$1.HttpHeaders.IfModifiedSince] = this.startTime; - } - const rangeId = await this.getPartitionRangeId(feedRange, diagnosticNode); - try { - // startEpk and endEpk are only valid in case we want to fetch result for a part of partition and not the entire partition. - const response = await this.clientContext.queryFeed({ - path: this.resourceLink, - resourceType: exports.ResourceType.item, - resourceId: this.resourceId, - resultFn: (result) => (result ? result.Documents : []), - query: undefined, - options: feedOptions, - diagnosticNode, - partitionKey: undefined, - partitionKeyRangeId: rangeId, - startEpk: feedRange.epkMinHeader, - endEpk: feedRange.epkMaxHeader, - }); - return new ChangeFeedIteratorResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers, getEmptyCosmosDiagnostics()); - } - catch (err) { - // If any errors are encountered, eg. partition split or gone, handle it based on error code and not break the flow. - return new ChangeFeedIteratorResponse([], 0, err.code, err.headers, getEmptyCosmosDiagnostics(), err.substatus); - } + else { + return `${host}/${tenantId}`; } } - /** - * Continuation token for change feed of entire container, or a specific Epk Range. + * Generates the known authorities. + * If the Tenant Id is `adfs`, the authority can't be validated since the format won't match the expected one. + * For that reason, we have to force MSAL to disable validating the authority + * by sending it within the known authorities in the MSAL configuration. 
* @internal */ -class ContinuationTokenForPartitionKey { - constructor(rid, partitionKey, continuation) { - this.rid = rid; - this.partitionKey = partitionKey; - this.Continuation = continuation; +function getKnownAuthorities(tenantId, authorityHost, disableInstanceDiscovery) { + if ((tenantId === "adfs" && authorityHost) || disableInstanceDiscovery) { + return [authorityHost]; } + return []; } - /** - * @hidden - * Provides iterator for change feed for one partition key. - * - * Use `Items.getChangeFeedIterator()` to get an instance of the iterator. + * Generates a logger that can be passed to the MSAL clients. + * @param logger - The logger of the credential. + * @internal */ -class ChangeFeedForPartitionKey { - /** - * @internal - */ - constructor(clientContext, container, resourceId, resourceLink, partitionKey, changeFeedOptions) { - this.clientContext = clientContext; - this.container = container; - this.resourceId = resourceId; - this.resourceLink = resourceLink; - this.partitionKey = partitionKey; - this.changeFeedOptions = changeFeedOptions; - this.continuationToken = changeFeedOptions.continuationToken - ? JSON.parse(changeFeedOptions.continuationToken) - : undefined; - this.isInstantiated = false; - if (changeFeedOptions.startTime) { - this.startTime = changeFeedOptions.startTime.toUTCString(); - } +const defaultLoggerCallback = (logger, platform = coreUtil.isNode ? "Node" : "Browser") => (level, message, containsPii) => { + if (containsPii) { + return; } - async instantiateIterator(diagnosticNode) { - await this.setIteratorRid(diagnosticNode); - if (this.continuationToken) { - if (!this.continuationTokenRidMatchContainerRid()) { - throw new ErrorResponse("The continuation is not for the current container definition."); - } - } - else { - this.continuationToken = new ContinuationTokenForPartitionKey(this.rId, this.partitionKey, ""); - } - this.isInstantiated = true; + switch (level) { + case msalCommon__namespace.LogLevel.Error: + logger.info(`MSAL ${platform} V2 error: ${message}`); + return; + case msalCommon__namespace.LogLevel.Info: + logger.info(`MSAL ${platform} V2 info message: ${message}`); + return; + case msalCommon__namespace.LogLevel.Verbose: + logger.info(`MSAL ${platform} V2 verbose message: ${message}`); + return; + case msalCommon__namespace.LogLevel.Warning: + logger.info(`MSAL ${platform} V2 warning: ${message}`); + return; } - continuationTokenRidMatchContainerRid() { - if (this.continuationToken.rid !== this.rId) { - return false; - } - return true; +}; +/** + * @internal + */ +function getMSALLogLevel(logLevel) { + switch (logLevel) { + case "error": + return msalCommon__namespace.LogLevel.Error; + case "info": + return msalCommon__namespace.LogLevel.Info; + case "verbose": + return msalCommon__namespace.LogLevel.Verbose; + case "warning": + return msalCommon__namespace.LogLevel.Warning; + default: + // default msal logging level should be Info + return msalCommon__namespace.LogLevel.Info; } - async setIteratorRid(diagnosticNode) { - const { resource } = await this.container.readInternal(diagnosticNode); - this.rId = resource._rid; +} +/** + * The common utility functions for the MSAL clients. + * Defined as a class so that the classes extending this one can have access to its methods and protected properties. + * + * It keeps track of a logger and an in-memory copy of the AuthenticationRecord. 
+ * + * @internal + */ +class MsalBaseUtilities { + constructor(options) { + this.logger = options.logger; + this.account = options.authenticationRecord; } /** - * Change feed is an infinite feed. hasMoreResults is always true. + * Generates a UUID */ - get hasMoreResults() { - return true; + generateUuid() { + return coreUtil.randomUUID(); } /** - * Gets an async iterator which will yield change feed results. + * Handles the MSAL authentication result. + * If the result has an account, we update the local account reference. + * If the token received is invalid, an error will be thrown depending on what's missing. */ - getAsyncIterator() { - return tslib.__asyncGenerator(this, arguments, function* getAsyncIterator_1() { - do { - const result = yield tslib.__await(this.readNext()); - yield yield tslib.__await(result); - } while (this.hasMoreResults); - }); + handleResult(scopes, clientId, result, getTokenOptions) { + if (result === null || result === void 0 ? void 0 : result.account) { + this.account = msalToPublic(clientId, result.account); + } + ensureValidMsalToken(scopes, this.logger, result, getTokenOptions); + this.logger.getToken.info(formatSuccess(scopes)); + return { + token: result.accessToken, + expiresOnTimestamp: result.expiresOn.getTime(), + }; } /** - * Returns the result of change feed from Azure Cosmos DB. + * Handles MSAL errors. */ - async readNext() { - return withDiagnostics(async (diagnosticNode) => { - if (!this.isInstantiated) { - await this.instantiateIterator(diagnosticNode); + handleError(scopes, error, getTokenOptions) { + if (error.name === "AuthError" || + error.name === "ClientAuthError" || + error.name === "BrowserAuthError") { + const msalError = error; + switch (msalError.errorCode) { + case "endpoints_resolution_error": + this.logger.info(formatError(scopes, error.message)); + return new CredentialUnavailableError(error.message); + case "device_code_polling_cancelled": + return new abortController.AbortError("The authentication has been aborted by the caller."); + case "consent_required": + case "interaction_required": + case "login_required": + this.logger.info(formatError(scopes, `Authentication returned errorCode ${msalError.errorCode}`)); + break; + default: + this.logger.info(formatError(scopes, `Failed to acquire token: ${error.message}`)); + break; } - const result = await this.fetchNext(diagnosticNode); - return result; - }, this.clientContext); - } - /** - * Read feed and retrieves the next set of results in Azure Cosmos DB. 
- */ - async fetchNext(diagnosticNode) { - const response = await this.getFeedResponse(diagnosticNode); - this.continuationToken.Continuation = response.headers[Constants$1.HttpHeaders.ETag]; - response.headers[Constants$1.HttpHeaders.ContinuationToken] = JSON.stringify(this.continuationToken); - return response; - } - async getFeedResponse(diagnosticNode) { - const feedOptions = { initialHeaders: {}, useIncrementalFeed: true }; - if (typeof this.changeFeedOptions.maxItemCount === "number") { - feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount; - } - if (this.changeFeedOptions.sessionToken) { - feedOptions.sessionToken = this.changeFeedOptions.sessionToken; } - const continuation = this.continuationToken.Continuation; - if (continuation) { - feedOptions.accessCondition = { - type: Constants$1.HttpHeaders.IfNoneMatch, - condition: continuation, - }; + if (error.name === "ClientConfigurationError" || + error.name === "BrowserConfigurationAuthError" || + error.name === "AbortError") { + return error; } - if (this.startTime) { - feedOptions.initialHeaders[Constants$1.HttpHeaders.IfModifiedSince] = this.startTime; + if (error.name === "NativeAuthError") { + this.logger.info(formatError(scopes, `Error from the native broker: ${error.message} with status code: ${error.statusCode}`)); + return error; } - const response = await this.clientContext.queryFeed({ - path: this.resourceLink, - resourceType: exports.ResourceType.item, - resourceId: this.resourceId, - resultFn: (result) => (result ? result.Documents : []), - diagnosticNode, - query: undefined, - options: feedOptions, - partitionKey: this.partitionKey, - }); - return new ChangeFeedIteratorResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers, getEmptyCosmosDiagnostics()); + return new AuthenticationRequiredError({ scopes, getTokenOptions, message: error.message }); } } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +// transformations.ts +function publicToMsal(account) { + const [environment] = account.authority.match(/([a-z]*\.[a-z]*\.[a-z]*)/) || [""]; + return Object.assign(Object.assign({}, account), { localAccountId: account.homeAccountId, environment }); +} +function msalToPublic(clientId, account) { + const record = { + authority: getAuthority(account.tenantId, account.environment), + homeAccountId: account.homeAccountId, + tenantId: account.tenantId || DefaultTenantId, + username: account.username, + clientId, + version: LatestAuthenticationRecordVersion, + }; + return record; +} /** - * Enum to specify the resource for which change feed is being fetched. + * Serializes an `AuthenticationRecord` into a string. + * + * The output of a serialized authentication record will contain the following properties: + * + * - "authority" + * - "homeAccountId" + * - "clientId" + * - "tenantId" + * - "username" + * - "version" + * + * To later convert this string to a serialized `AuthenticationRecord`, please use the exported function `deserializeAuthenticationRecord()`. */ -var ChangeFeedResourceType; -(function (ChangeFeedResourceType) { - ChangeFeedResourceType[ChangeFeedResourceType["FeedRange"] = 0] = "FeedRange"; - ChangeFeedResourceType[ChangeFeedResourceType["PartitionKey"] = 1] = "PartitionKey"; -})(ChangeFeedResourceType || (ChangeFeedResourceType = {})); +function serializeAuthenticationRecord(record) { + return JSON.stringify(record); +} +/** + * Deserializes a previously serialized authentication record from a string into an object. 
+ * + * The input string must contain the following properties: + * + * - "authority" + * - "homeAccountId" + * - "clientId" + * - "tenantId" + * - "username" + * - "version" + * + * If the version we receive is unsupported, an error will be thrown. + * + * At the moment, the only available version is: "1.0", which is always set when the authentication record is serialized. + * + * @param serializedRecord - Authentication record previously serialized into string. + * @returns AuthenticationRecord. + */ +function deserializeAuthenticationRecord(serializedRecord) { + const parsed = JSON.parse(serializedRecord); + if (parsed.version && parsed.version !== LatestAuthenticationRecordVersion) { + throw Error("Unsupported AuthenticationRecord version"); + } + return parsed; +} // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function createConfigurationErrorMessage(tenantId) { + return `The current credential is not configured to acquire tokens for tenant ${tenantId}. To enable acquiring tokens for this tenant add it to the AdditionallyAllowedTenants on the credential options, or add "*" to AdditionallyAllowedTenants to allow acquiring tokens for any tenant.`; +} /** - * @hidden - * Class which specifies the ChangeFeedIterator to start reading changes from a saved point. + * Of getToken contains a tenantId, this functions allows picking this tenantId as the appropriate for authentication, + * unless multitenant authentication has been disabled through the AZURE_IDENTITY_DISABLE_MULTITENANTAUTH (on Node.js), + * or unless the original tenant Id is `adfs`. + * @internal */ -class ChangeFeedStartFromContinuation { - constructor(continuation) { - this.continuationToken = continuation; +function processMultiTenantRequest(tenantId, getTokenOptions, additionallyAllowedTenantIds = [], logger) { + var _a; + let resolvedTenantId; + if (process.env.AZURE_IDENTITY_DISABLE_MULTITENANTAUTH) { + resolvedTenantId = tenantId; } - getCfResource() { - return this.continuationToken; + else if (tenantId === "adfs") { + resolvedTenantId = tenantId; } - getCfResourceJson() { - return JSON.parse(this.continuationToken); + else { + resolvedTenantId = (_a = getTokenOptions === null || getTokenOptions === void 0 ? void 0 : getTokenOptions.tenantId) !== null && _a !== void 0 ? _a : tenantId; } - getResourceType() { - const cToken = this.getCfResourceJson(); - if (Object.prototype.hasOwnProperty.call(cToken, "partitionKey") && - Object.prototype.hasOwnProperty.call(cToken, "Continuation") && - typeof cToken.Continuation === "string") { - return ChangeFeedResourceType.PartitionKey; - } - else if (Object.prototype.hasOwnProperty.call(cToken, "Continuation") && - Array.isArray(cToken.Continuation) && - cToken.Continuation.length > 0) { - return ChangeFeedResourceType.FeedRange; - } - else { - throw new ErrorResponse("Invalid continuation token."); - } + if (tenantId && + resolvedTenantId !== tenantId && + !additionallyAllowedTenantIds.includes("*") && + !additionallyAllowedTenantIds.some((t) => t.localeCompare(resolvedTenantId) === 0)) { + const message = createConfigurationErrorMessage(tenantId); + logger === null || logger === void 0 ? void 0 : logger.info(message); + throw new CredentialUnavailableError(message); } + return resolvedTenantId; } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Base class for where to start a ChangeFeedIterator. 
+ * @internal */ -/* eslint-disable @typescript-eslint/no-extraneous-class */ -class ChangeFeedStartFrom { - /** - * Returns an object that tells the ChangeFeedIterator to start from the beginning of time. - * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container. - */ - static Beginning(cfResource) { - return new ChangeFeedStartFromBeginning(cfResource); +function checkTenantId(logger, tenantId) { + if (!tenantId.match(/^[0-9a-zA-Z-.]+$/)) { + const error = new Error("Invalid tenant id provided. You can locate your tenant id by following the instructions listed here: https://learn.microsoft.com/partner-center/find-ids-and-domain-names."); + logger.info(formatError("", error)); + throw error; } - /** - * Returns an object that tells the ChangeFeedIterator to start reading changes from this moment onward. - * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container. - **/ - static Now(cfResource) { - return new ChangeFeedStartFromNow(cfResource); +} +/** + * @internal + */ +function resolveTenantId(logger, tenantId, clientId) { + if (tenantId) { + checkTenantId(logger, tenantId); + return tenantId; } - /** - * Returns an object that tells the ChangeFeedIterator to start reading changes from some point in time onward. - * @param startTime - Date object specfiying the time to start reading changes from. - * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container. - */ - static Time(startTime, cfResource) { - if (!startTime) { - throw new ErrorResponse("startTime must be present"); - } - if (startTime instanceof Date === true) { - return new ChangeFeedStartFromTime(startTime, cfResource); - } - else { - throw new ErrorResponse("startTime must be a Date object."); - } + if (!clientId) { + clientId = DeveloperSignOnClientId; } - /** - * Returns an object that tells the ChangeFeedIterator to start reading changes from a save point. - * @param continuation - The continuation to resume from. - */ - static Continuation(continuationToken) { - if (!continuationToken) { - throw new ErrorResponse("Argument continuation must be passed."); - } - if (isNullOrEmpty(continuationToken)) { - throw new ErrorResponse("Argument continuationToken must be a non-empty string."); - } - return new ChangeFeedStartFromContinuation(continuationToken); + if (clientId !== DeveloperSignOnClientId) { + return "common"; + } + return "organizations"; +} +/** + * @internal + */ +function resolveAdditionallyAllowedTenantIds(additionallyAllowedTenants) { + if (!additionallyAllowedTenants || additionallyAllowedTenants.length === 0) { + return []; + } + if (additionallyAllowedTenants.includes("*")) { + return ALL_TENANTS; } + return additionallyAllowedTenants; } -function changeFeedIteratorBuilder(cfOptions, clientContext, container, partitionKeyRangeCache) { - const url = container.url; - const path = getPathFromLink(url, exports.ResourceType.item); - const id = getIdFromLink(url); - let changeFeedStartFrom = cfOptions.changeFeedStartFrom; - if (changeFeedStartFrom === undefined) { - changeFeedStartFrom = ChangeFeedStartFrom.Now(); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+function getIdentityTokenEndpointSuffix(tenantId) { + if (tenantId === "adfs") { + return "oauth2/token"; } - if (changeFeedStartFrom instanceof ChangeFeedStartFromContinuation) { - const continuationToken = changeFeedStartFrom.getCfResourceJson(); - const resourceType = changeFeedStartFrom.getResourceType(); - const internalCfOptions = buildInternalChangeFeedOptions(cfOptions, changeFeedStartFrom.getCfResource()); - if (resourceType === ChangeFeedResourceType.PartitionKey && - isPartitionKey(continuationToken.partitionKey)) { - return new ChangeFeedForPartitionKey(clientContext, container, id, path, continuationToken.partitionKey, internalCfOptions); - } - else if (resourceType === ChangeFeedResourceType.FeedRange) { - return new ChangeFeedForEpkRange(clientContext, container, partitionKeyRangeCache, id, path, url, internalCfOptions, undefined); - } - else { - throw new ErrorResponse("Invalid continuation token."); + else { + return "oauth2/v2.0/token"; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Creates a span using the global tracer. + * @internal + */ +const tracingClient = coreTracing.createTracingClient({ + namespace: "Microsoft.AAD", + packageName: "@azure/identity", + packageVersion: SDK_VERSION, +}); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const DefaultScopeSuffix = "/.default"; +const imdsHost = "http://169.254.169.254"; +const imdsEndpointPath = "/metadata/identity/oauth2/token"; +const imdsApiVersion = "2018-02-01"; +const azureArcAPIVersion = "2019-11-01"; +const azureFabricVersion = "2019-07-01-preview"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Most MSIs send requests to the IMDS endpoint, or a similar endpoint. + * These are GET requests that require sending a `resource` parameter on the query. + * This resource can be derived from the scopes received through the getToken call, as long as only one scope is received. + * Multiple scopes assume that the resulting token will have access to multiple resources, which won't be the case. + * + * For that reason, when we encounter multiple scopes, we return undefined. + * It's up to the individual MSI implementations to throw the errors (which helps us provide less generic errors). + */ +function mapScopesToResource(scopes) { + let scope = ""; + if (Array.isArray(scopes)) { + if (scopes.length !== 1) { + return; } + scope = scopes[0]; } - else if (changeFeedStartFrom instanceof ChangeFeedStartFromNow || - changeFeedStartFrom instanceof ChangeFeedStartFromTime || - changeFeedStartFrom instanceof ChangeFeedStartFromBeginning) { - const startTime = fetchStartTime(changeFeedStartFrom); - const internalCfOptions = buildInternalChangeFeedOptions(cfOptions, undefined, startTime); - const cfResource = changeFeedStartFrom.getCfResource(); - if (isPartitionKey(cfResource)) { - return new ChangeFeedForPartitionKey(clientContext, container, id, path, cfResource, internalCfOptions); + else if (typeof scopes === "string") { + scope = scopes; + } + if (!scope.endsWith(DefaultScopeSuffix)) { + return scope; + } + return scope.substr(0, scope.lastIndexOf(DefaultScopeSuffix)); +} +/** + * Given a token response, return the expiration timestamp as the number of milliseconds from the Unix epoch. + * @param body - A parsed response body from the authentication endpoint. 
+ */ +function parseExpirationTimestamp(body) { + if (typeof body.expires_on === "number") { + return body.expires_on * 1000; + } + if (typeof body.expires_on === "string") { + const asNumber = +body.expires_on; + if (!isNaN(asNumber)) { + return asNumber * 1000; } - else { - let internalCfResource; - if (cfResource === undefined) { - internalCfResource = new QueryRange(Constants$1.EffectivePartitionKeyConstants.MinimumInclusiveEffectivePartitionKey, Constants$1.EffectivePartitionKeyConstants.MaximumExclusiveEffectivePartitionKey, true, false); - } - else if (isEpkRange(cfResource)) { - internalCfResource = new QueryRange(cfResource.minInclusive, cfResource.maxExclusive, true, false); - } - else { - throw new ErrorResponse("Invalid feed range."); - } - return new ChangeFeedForEpkRange(clientContext, container, partitionKeyRangeCache, id, path, url, internalCfOptions, internalCfResource); + const asDate = Date.parse(body.expires_on); + if (!isNaN(asDate)) { + return asDate; } } - else { - throw new ErrorResponse("Invalid change feed start location."); + if (typeof body.expires_in === "number") { + return Date.now() + body.expires_in * 1000; } + throw new Error(`Failed to parse token expiration from body. expires_in="${body.expires_in}", expires_on="${body.expires_on}"`); } // Copyright (c) Microsoft Corporation. -const uuid$1 = uuid$3.v4; +// Licensed under the MIT license. +const noCorrelationId = "noCorrelationId"; /** - * @hidden + * @internal */ -function isChangeFeedOptions(options) { - return options && !(isPrimitivePartitionKeyValue(options) || Array.isArray(options)); +function getIdentityClientAuthorityHost(options) { + // The authorityHost can come from options or from the AZURE_AUTHORITY_HOST environment variable. + let authorityHost = options === null || options === void 0 ? void 0 : options.authorityHost; + // The AZURE_AUTHORITY_HOST environment variable can only be provided in Node.js. + if (coreUtil.isNode) { + authorityHost = authorityHost !== null && authorityHost !== void 0 ? authorityHost : process.env.AZURE_AUTHORITY_HOST; + } + // If the authorityHost is not provided, we use the default one from the public cloud: https://login.microsoftonline.com + return authorityHost !== null && authorityHost !== void 0 ? authorityHost : DefaultAuthorityHost; } /** - * Operations for creating new items, and reading/querying all items + * The network module used by the Identity credentials. + * + * It allows for credentials to abort any pending request independently of the MSAL flow, + * by calling to the `abortRequests()` method. * - * @see {@link Item} for reading, replacing, or deleting an existing container; use `.item(id)`. */ -class Items { - /** - * Create an instance of {@link Items} linked to the parent {@link Container}. - * @param container - The parent container. - * @hidden - */ - constructor(container, clientContext) { - this.container = container; - this.clientContext = clientContext; - this.partitionKeyRangeCache = new PartitionKeyRangeCache(this.clientContext); - } - query(query, options = {}) { - const path = getPathFromLink(this.container.url, exports.ResourceType.item); - const id = getIdFromLink(this.container.url); - const fetchFunction = async (diagnosticNode, innerOptions) => { - const response = await this.clientContext.queryFeed({ - path, - resourceType: exports.ResourceType.item, - resourceId: id, - resultFn: (result) => (result ? 
result.Documents : []), - query, - options: innerOptions, - partitionKey: options.partitionKey, - diagnosticNode, - }); - return response; - }; - return new QueryIterator(this.clientContext, query, options, fetchFunction, this.container.url, exports.ResourceType.item); +class IdentityClient extends coreClient.ServiceClient { + constructor(options) { + var _a, _b; + const packageDetails = `azsdk-js-identity/${SDK_VERSION}`; + const userAgentPrefix = ((_a = options === null || options === void 0 ? void 0 : options.userAgentOptions) === null || _a === void 0 ? void 0 : _a.userAgentPrefix) + ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + const baseUri = getIdentityClientAuthorityHost(options); + if (!baseUri.startsWith("https:")) { + throw new Error("The authorityHost address must use the 'https' protocol."); + } + super(Object.assign(Object.assign({ requestContentType: "application/json; charset=utf-8", retryOptions: { + maxRetries: 3, + } }, options), { userAgentOptions: { + userAgentPrefix, + }, baseUri })); + this.authorityHost = baseUri; + this.abortControllers = new Map(); + this.allowLoggingAccountIdentifiers = (_b = options === null || options === void 0 ? void 0 : options.loggingOptions) === null || _b === void 0 ? void 0 : _b.allowLoggingAccountIdentifiers; + // used for WorkloadIdentity + this.tokenCredentialOptions = Object.assign({}, options); } - readChangeFeed(partitionKeyOrChangeFeedOptions, changeFeedOptions) { - if (isChangeFeedOptions(partitionKeyOrChangeFeedOptions)) { - return this.changeFeed(partitionKeyOrChangeFeedOptions); + async sendTokenRequest(request) { + logger$n.info(`IdentityClient: sending token request to [${request.url}]`); + const response = await this.sendRequest(request); + if (response.bodyAsText && (response.status === 200 || response.status === 201)) { + const parsedBody = JSON.parse(response.bodyAsText); + if (!parsedBody.access_token) { + return null; + } + this.logIdentifiers(response); + const token = { + accessToken: { + token: parsedBody.access_token, + expiresOnTimestamp: parseExpirationTimestamp(parsedBody), + }, + refreshToken: parsedBody.refresh_token, + }; + logger$n.info(`IdentityClient: [${request.url}] token acquired, expires on ${token.accessToken.expiresOnTimestamp}`); + return token; } else { - return this.changeFeed(partitionKeyOrChangeFeedOptions, changeFeedOptions); + const error = new AuthenticationError(response.status, response.bodyAsText); + logger$n.warning(`IdentityClient: authentication error. 
HTTP status: ${response.status}, ${error.errorResponse.errorDescription}`); + throw error; } } - changeFeed(partitionKeyOrChangeFeedOptions, changeFeedOptions) { - let partitionKey; - if (!changeFeedOptions && isChangeFeedOptions(partitionKeyOrChangeFeedOptions)) { - partitionKey = undefined; - changeFeedOptions = partitionKeyOrChangeFeedOptions; - } - else if (partitionKeyOrChangeFeedOptions !== undefined && - !isChangeFeedOptions(partitionKeyOrChangeFeedOptions)) { - partitionKey = partitionKeyOrChangeFeedOptions; + async refreshAccessToken(tenantId, clientId, scopes, refreshToken, clientSecret, options = {}) { + if (refreshToken === undefined) { + return null; } - if (!changeFeedOptions) { - changeFeedOptions = {}; + logger$n.info(`IdentityClient: refreshing access token with client ID: ${clientId}, scopes: ${scopes} started`); + const refreshParams = { + grant_type: "refresh_token", + client_id: clientId, + refresh_token: refreshToken, + scope: scopes, + }; + if (clientSecret !== undefined) { + refreshParams.client_secret = clientSecret; } - const path = getPathFromLink(this.container.url, exports.ResourceType.item); - const id = getIdFromLink(this.container.url); - return new ChangeFeedIterator(this.clientContext, id, path, partitionKey, changeFeedOptions); - } - /** - * Returns an iterator to iterate over pages of changes. The iterator returned can be used to fetch changes for a single partition key, feed range or an entire container. - */ - getChangeFeedIterator(changeFeedIteratorOptions) { - const cfOptions = changeFeedIteratorOptions !== undefined ? changeFeedIteratorOptions : {}; - validateChangeFeedIteratorOptions(cfOptions); - const iterator = changeFeedIteratorBuilder(cfOptions, this.clientContext, this.container, this.partitionKeyRangeCache); - return iterator; - } - readAll(options) { - return this.query("SELECT * from c", options); - } - /** - * Create an item. - * - * Any provided type, T, is not necessarily enforced by the SDK. - * You may get more or less properties and it's up to your logic to enforce it. - * - * There is no set schema for JSON items. They may contain any number of custom properties. - * - * @param body - Represents the body of the item. Can contain any number of user defined properties. - * @param options - Used for modifying the request (for instance, specifying the partition key). 
- */ - async create(body, options = {}) { - // Generate random document id if the id is missing in the payload and - // options.disableAutomaticIdGeneration != true - return withDiagnostics(async (diagnosticNode) => { - if ((body.id === undefined || body.id === "") && !options.disableAutomaticIdGeneration) { - body.id = uuid$1(); + const query = new URLSearchParams(refreshParams); + return tracingClient.withSpan("IdentityClient.refreshAccessToken", options, async (updatedOptions) => { + try { + const urlSuffix = getIdentityTokenEndpointSuffix(tenantId); + const request = coreRestPipeline.createPipelineRequest({ + url: `${this.authorityHost}/${tenantId}/${urlSuffix}`, + method: "POST", + body: query.toString(), + abortSignal: options.abortSignal, + headers: coreRestPipeline.createHttpHeaders({ + Accept: "application/json", + "Content-Type": "application/x-www-form-urlencoded", + }), + tracingOptions: updatedOptions.tracingOptions, + }); + const response = await this.sendTokenRequest(request); + logger$n.info(`IdentityClient: refreshed token for client ID: ${clientId}`); + return response; } - const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); - const partitionKey = extractPartitionKeys(body, partitionKeyDefinition); - const err = {}; - if (!isItemResourceValid(body, err)) { - throw err; + catch (err) { + if (err.name === AuthenticationErrorName && + err.errorResponse.error === "interaction_required") { + // It's likely that the refresh token has expired, so + // return null so that the credential implementation will + // initiate the authentication flow again. + logger$n.info(`IdentityClient: interaction required for client ID: ${clientId}`); + return null; + } + else { + logger$n.warning(`IdentityClient: failed refreshing token for client ID: ${clientId}: ${err}`); + throw err; + } } - const path = getPathFromLink(this.container.url, exports.ResourceType.item); - const id = getIdFromLink(this.container.url); - const response = await this.clientContext.create({ - body, - path, - resourceType: exports.ResourceType.item, - resourceId: id, - diagnosticNode, - options, - partitionKey, - }); - const ref = new Item(this.container, response.result.id, this.clientContext, partitionKey); - return new ItemResponse(response.result, response.headers, response.code, response.substatus, ref, getEmptyCosmosDiagnostics()); - }, this.clientContext); + }); } - async upsert(body, options = {}) { - return withDiagnostics(async (diagnosticNode) => { - // Generate random document id if the id is missing in the payload and - // options.disableAutomaticIdGeneration != true - if ((body.id === undefined || body.id === "") && !options.disableAutomaticIdGeneration) { - body.id = uuid$1(); - } - const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); - const partitionKey = extractPartitionKeys(body, partitionKeyDefinition); - const err = {}; - if (!isItemResourceValid(body, err)) { - throw err; + // Here is a custom layer that allows us to abort requests that go through MSAL, + // since MSAL doesn't allow us to pass options all the way through. 
+ generateAbortSignal(correlationId) { + const controller = new abortController.AbortController(); + const controllers = this.abortControllers.get(correlationId) || []; + controllers.push(controller); + this.abortControllers.set(correlationId, controllers); + const existingOnAbort = controller.signal.onabort; + controller.signal.onabort = (...params) => { + this.abortControllers.set(correlationId, undefined); + if (existingOnAbort) { + existingOnAbort(...params); } - const path = getPathFromLink(this.container.url, exports.ResourceType.item); - const id = getIdFromLink(this.container.url); - const response = await this.clientContext.upsert({ - body, - path, - resourceType: exports.ResourceType.item, - resourceId: id, - options, - partitionKey, - diagnosticNode, - }); - const ref = new Item(this.container, response.result.id, this.clientContext, partitionKey); - return new ItemResponse(response.result, response.headers, response.code, response.substatus, ref, getEmptyCosmosDiagnostics()); - }, this.clientContext); + }; + return controller.signal; } - /** - * Execute bulk operations on items. - * - * Bulk takes an array of Operations which are typed based on what the operation does. - * The choices are: Create, Upsert, Read, Replace, and Delete - * - * Usage example: - * ```typescript - * // partitionKey is optional at the top level if present in the resourceBody - * const operations: OperationInput[] = [ - * { - * operationType: "Create", - * resourceBody: { id: "doc1", name: "sample", key: "A" } - * }, - * { - * operationType: "Upsert", - * partitionKey: 'A', - * resourceBody: { id: "doc2", name: "other", key: "A" } - * } - * ] - * - * await database.container.items.bulk(operations) - * ``` - * - * @param operations - List of operations. Limit 100 - * @param bulkOptions - Optional options object to modify bulk behavior. Pass \{ continueOnError: true \} to continue executing operations when one fails. (Defaults to false) ** NOTE: THIS WILL DEFAULT TO TRUE IN THE 4.0 RELEASE - * @param options - Used for modifying the request. 
- */ - async bulk(operations, bulkOptions, options) { - return withDiagnostics(async (diagnosticNode) => { - const { resources: partitionKeyRanges } = await this.container - .readPartitionKeyRanges() - .fetchAll(); - const partitionKeyDefinition = await readPartitionKeyDefinition(diagnosticNode, this.container); - const batches = partitionKeyRanges.map((keyRange) => { - return { - min: keyRange.minInclusive, - max: keyRange.maxExclusive, - rangeId: keyRange.id, - indexes: [], - operations: [], - }; - }); - this.groupOperationsBasedOnPartitionKey(operations, partitionKeyDefinition, options, batches); - const path = getPathFromLink(this.container.url, exports.ResourceType.item); - const orderedResponses = []; - await Promise.all(batches - .filter((batch) => batch.operations.length) - .flatMap((batch) => splitBatchBasedOnBodySize(batch)) - .map(async (batch) => { - if (batch.operations.length > 100) { - throw new Error("Cannot run bulk request with more than 100 operations per partition"); - } - try { - const response = await addDignosticChild(async (childNode) => this.clientContext.bulk({ - body: batch.operations, - partitionKeyRangeId: batch.rangeId, - path, - resourceId: this.container.url, - bulkOptions, - options, - diagnosticNode: childNode, - }), diagnosticNode, exports.DiagnosticNodeType.BATCH_REQUEST); - response.result.forEach((operationResponse, index) => { - orderedResponses[batch.indexes[index]] = operationResponse; - }); - } - catch (err) { - // In the case of 410 errors, we need to recompute the partition key ranges - // and redo the batch request, however, 410 errors occur for unsupported - // partition key types as well since we don't support them, so for now we throw - if (err.code === 410) { - throw new Error("Partition key error. Either the partitions have split or an operation has an unsupported partitionKey type" + - err.message); - } - throw new Error(`Bulk request errored with: ${err.message}`); - } - })); - const response = orderedResponses; - response.diagnostics = diagnosticNode.toDiagnostic(this.clientContext.getClientConfig()); - return response; - }, this.clientContext); + abortRequests(correlationId) { + const key = correlationId || noCorrelationId; + const controllers = [ + ...(this.abortControllers.get(key) || []), + // MSAL passes no correlation ID to the get requests... + ...(this.abortControllers.get(noCorrelationId) || []), + ]; + if (!controllers.length) { + return; + } + for (const controller of controllers) { + controller.abort(); + } + this.abortControllers.set(key, undefined); } - /** - * Function to create batches based of partition key Ranges. - * @param operations - operations to group - * @param partitionDefinition - PartitionKey definition of container. - * @param options - Request options for bulk request. - * @param batches - Groups to be filled with operations. 
- */ - groupOperationsBasedOnPartitionKey(operations, partitionDefinition, options, batches) { - operations.forEach((operationInput, index) => { - const { operation, partitionKey } = prepareOperations(operationInput, partitionDefinition, options); - const hashed = hashPartitionKey(assertNotUndefined(partitionKey, "undefined value for PartitionKey is not expected during grouping of bulk operations."), partitionDefinition); - const batchForKey = assertNotUndefined(batches.find((batch) => { - return isKeyInRange(batch.min, batch.max, hashed); - }), "No suitable Batch found."); - batchForKey.operations.push(operation); - batchForKey.indexes.push(index); + getCorrelationId(options) { + var _a; + const parameter = (_a = options === null || options === void 0 ? void 0 : options.body) === null || _a === void 0 ? void 0 : _a.split("&").map((part) => part.split("=")).find(([key]) => key === "client-request-id"); + return parameter && parameter.length ? parameter[1] || noCorrelationId : noCorrelationId; + } + // The MSAL network module methods follow + async sendGetRequestAsync(url, options) { + const request = coreRestPipeline.createPipelineRequest({ + url, + method: "GET", + body: options === null || options === void 0 ? void 0 : options.body, + headers: coreRestPipeline.createHttpHeaders(options === null || options === void 0 ? void 0 : options.headers), + abortSignal: this.generateAbortSignal(noCorrelationId), + }); + const response = await this.sendRequest(request); + this.logIdentifiers(response); + return { + body: response.bodyAsText ? JSON.parse(response.bodyAsText) : undefined, + headers: response.headers.toJSON(), + status: response.status, + }; + } + async sendPostRequestAsync(url, options) { + const request = coreRestPipeline.createPipelineRequest({ + url, + method: "POST", + body: options === null || options === void 0 ? void 0 : options.body, + headers: coreRestPipeline.createHttpHeaders(options === null || options === void 0 ? void 0 : options.headers), + // MSAL doesn't send the correlation ID on the get requests. + abortSignal: this.generateAbortSignal(this.getCorrelationId(options)), }); + const response = await this.sendRequest(request); + this.logIdentifiers(response); + return { + body: response.bodyAsText ? JSON.parse(response.bodyAsText) : undefined, + headers: response.headers.toJSON(), + status: response.status, + }; } /** - * Execute transactional batch operations on items. - * - * Batch takes an array of Operations which are typed based on what the operation does. Batch is transactional and will rollback all operations if one fails. - * The choices are: Create, Upsert, Read, Replace, and Delete - * - * Usage example: - * ```typescript - * // partitionKey is required as a second argument to batch, but defaults to the default partition key - * const operations: OperationInput[] = [ - * { - * operationType: "Create", - * resourceBody: { id: "doc1", name: "sample", key: "A" } - * }, - * { - * operationType: "Upsert", - * partitionKey: 'A', - * resourceBody: { id: "doc2", name: "other", key: "A" } - * } - * ] - * - * await database.container.items.batch(operations) - * ``` * - * @param operations - List of operations. 
Limit 100 - * @param options - Used for modifying the request + * @internal */ - async batch(operations, partitionKey, options) { - return withDiagnostics(async (diagnosticNode) => { - operations.map((operation) => decorateBatchOperation(operation, options)); - const path = getPathFromLink(this.container.url, exports.ResourceType.item); - if (operations.length > 100) { - throw new Error("Cannot run batch request with more than 100 operations per partition"); - } - try { - const response = await this.clientContext.batch({ - body: operations, - partitionKey, - path, - resourceId: this.container.url, - options, - diagnosticNode, - }); - return response; - } - catch (err) { - throw new Error(`Batch request error: ${err.message}`); - } - }, this.clientContext); - } -} - -class StoredProcedureResponse extends ResourceResponse { - constructor(resource, headers, statusCode, storedProcedure, diagnostics) { - super(resource, headers, statusCode, diagnostics); - this.storedProcedure = storedProcedure; + getTokenCredentialOptions() { + return this.tokenCredentialOptions; } /** - * Alias for storedProcedure. + * If allowLoggingAccountIdentifiers was set on the constructor options + * we try to log the account identifiers by parsing the received access token. * - * A reference to the {@link StoredProcedure} which the {@link StoredProcedureDefinition} corresponds to. + * The account identifiers we try to log are: + * - `appid`: The application or Client Identifier. + * - `upn`: User Principal Name. + * - It might not be available in some authentication scenarios. + * - If it's not available, we put a placeholder: "No User Principal Name available". + * - `tid`: Tenant Identifier. + * - `oid`: Object Identifier of the authenticated user. */ - get sproc() { - return this.storedProcedure; + logIdentifiers(response) { + if (!this.allowLoggingAccountIdentifiers || !response.bodyAsText) { + return; + } + const unavailableUpn = "No User Principal Name available"; + try { + const parsed = response.parsedBody || JSON.parse(response.bodyAsText); + const accessToken = parsed.access_token; + if (!accessToken) { + // Without an access token allowLoggingAccountIdentifiers isn't useful. + return; + } + const base64Metadata = accessToken.split(".")[1]; + const { appid, upn, tid, oid } = JSON.parse(Buffer.from(base64Metadata, "base64").toString("utf8")); + logger$n.info(`[Authenticated account] Client ID: ${appid}. Tenant ID: ${tid}. User Principal Name: ${upn || unavailableUpn}. Object ID (user): ${oid}`); + } + catch (e) { + logger$n.warning("allowLoggingAccountIdentifiers was set, but we couldn't log the account information. Error:", e.message); + } } } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Operations for reading, replacing, deleting, or executing a specific, existing stored procedure by id. + * Helps specify a regional authority, or "AutoDiscoverRegion" to auto-detect the region. + */ +var RegionalAuthority; +(function (RegionalAuthority) { + /** Instructs MSAL to attempt to discover the region */ + RegionalAuthority["AutoDiscoverRegion"] = "AutoDiscoverRegion"; + /** Uses the {@link RegionalAuthority} for the Azure 'westus' region. */ + RegionalAuthority["USWest"] = "westus"; + /** Uses the {@link RegionalAuthority} for the Azure 'westus2' region. */ + RegionalAuthority["USWest2"] = "westus2"; + /** Uses the {@link RegionalAuthority} for the Azure 'centralus' region. 
*/ + RegionalAuthority["USCentral"] = "centralus"; + /** Uses the {@link RegionalAuthority} for the Azure 'eastus' region. */ + RegionalAuthority["USEast"] = "eastus"; + /** Uses the {@link RegionalAuthority} for the Azure 'eastus2' region. */ + RegionalAuthority["USEast2"] = "eastus2"; + /** Uses the {@link RegionalAuthority} for the Azure 'northcentralus' region. */ + RegionalAuthority["USNorthCentral"] = "northcentralus"; + /** Uses the {@link RegionalAuthority} for the Azure 'southcentralus' region. */ + RegionalAuthority["USSouthCentral"] = "southcentralus"; + /** Uses the {@link RegionalAuthority} for the Azure 'westcentralus' region. */ + RegionalAuthority["USWestCentral"] = "westcentralus"; + /** Uses the {@link RegionalAuthority} for the Azure 'canadacentral' region. */ + RegionalAuthority["CanadaCentral"] = "canadacentral"; + /** Uses the {@link RegionalAuthority} for the Azure 'canadaeast' region. */ + RegionalAuthority["CanadaEast"] = "canadaeast"; + /** Uses the {@link RegionalAuthority} for the Azure 'brazilsouth' region. */ + RegionalAuthority["BrazilSouth"] = "brazilsouth"; + /** Uses the {@link RegionalAuthority} for the Azure 'northeurope' region. */ + RegionalAuthority["EuropeNorth"] = "northeurope"; + /** Uses the {@link RegionalAuthority} for the Azure 'westeurope' region. */ + RegionalAuthority["EuropeWest"] = "westeurope"; + /** Uses the {@link RegionalAuthority} for the Azure 'uksouth' region. */ + RegionalAuthority["UKSouth"] = "uksouth"; + /** Uses the {@link RegionalAuthority} for the Azure 'ukwest' region. */ + RegionalAuthority["UKWest"] = "ukwest"; + /** Uses the {@link RegionalAuthority} for the Azure 'francecentral' region. */ + RegionalAuthority["FranceCentral"] = "francecentral"; + /** Uses the {@link RegionalAuthority} for the Azure 'francesouth' region. */ + RegionalAuthority["FranceSouth"] = "francesouth"; + /** Uses the {@link RegionalAuthority} for the Azure 'switzerlandnorth' region. */ + RegionalAuthority["SwitzerlandNorth"] = "switzerlandnorth"; + /** Uses the {@link RegionalAuthority} for the Azure 'switzerlandwest' region. */ + RegionalAuthority["SwitzerlandWest"] = "switzerlandwest"; + /** Uses the {@link RegionalAuthority} for the Azure 'germanynorth' region. */ + RegionalAuthority["GermanyNorth"] = "germanynorth"; + /** Uses the {@link RegionalAuthority} for the Azure 'germanywestcentral' region. */ + RegionalAuthority["GermanyWestCentral"] = "germanywestcentral"; + /** Uses the {@link RegionalAuthority} for the Azure 'norwaywest' region. */ + RegionalAuthority["NorwayWest"] = "norwaywest"; + /** Uses the {@link RegionalAuthority} for the Azure 'norwayeast' region. */ + RegionalAuthority["NorwayEast"] = "norwayeast"; + /** Uses the {@link RegionalAuthority} for the Azure 'eastasia' region. */ + RegionalAuthority["AsiaEast"] = "eastasia"; + /** Uses the {@link RegionalAuthority} for the Azure 'southeastasia' region. */ + RegionalAuthority["AsiaSouthEast"] = "southeastasia"; + /** Uses the {@link RegionalAuthority} for the Azure 'japaneast' region. */ + RegionalAuthority["JapanEast"] = "japaneast"; + /** Uses the {@link RegionalAuthority} for the Azure 'japanwest' region. */ + RegionalAuthority["JapanWest"] = "japanwest"; + /** Uses the {@link RegionalAuthority} for the Azure 'australiaeast' region. */ + RegionalAuthority["AustraliaEast"] = "australiaeast"; + /** Uses the {@link RegionalAuthority} for the Azure 'australiasoutheast' region. 
*/ + RegionalAuthority["AustraliaSouthEast"] = "australiasoutheast"; + /** Uses the {@link RegionalAuthority} for the Azure 'australiacentral' region. */ + RegionalAuthority["AustraliaCentral"] = "australiacentral"; + /** Uses the {@link RegionalAuthority} for the Azure 'australiacentral2' region. */ + RegionalAuthority["AustraliaCentral2"] = "australiacentral2"; + /** Uses the {@link RegionalAuthority} for the Azure 'centralindia' region. */ + RegionalAuthority["IndiaCentral"] = "centralindia"; + /** Uses the {@link RegionalAuthority} for the Azure 'southindia' region. */ + RegionalAuthority["IndiaSouth"] = "southindia"; + /** Uses the {@link RegionalAuthority} for the Azure 'westindia' region. */ + RegionalAuthority["IndiaWest"] = "westindia"; + /** Uses the {@link RegionalAuthority} for the Azure 'koreasouth' region. */ + RegionalAuthority["KoreaSouth"] = "koreasouth"; + /** Uses the {@link RegionalAuthority} for the Azure 'koreacentral' region. */ + RegionalAuthority["KoreaCentral"] = "koreacentral"; + /** Uses the {@link RegionalAuthority} for the Azure 'uaecentral' region. */ + RegionalAuthority["UAECentral"] = "uaecentral"; + /** Uses the {@link RegionalAuthority} for the Azure 'uaenorth' region. */ + RegionalAuthority["UAENorth"] = "uaenorth"; + /** Uses the {@link RegionalAuthority} for the Azure 'southafricanorth' region. */ + RegionalAuthority["SouthAfricaNorth"] = "southafricanorth"; + /** Uses the {@link RegionalAuthority} for the Azure 'southafricawest' region. */ + RegionalAuthority["SouthAfricaWest"] = "southafricawest"; + /** Uses the {@link RegionalAuthority} for the Azure 'chinanorth' region. */ + RegionalAuthority["ChinaNorth"] = "chinanorth"; + /** Uses the {@link RegionalAuthority} for the Azure 'chinaeast' region. */ + RegionalAuthority["ChinaEast"] = "chinaeast"; + /** Uses the {@link RegionalAuthority} for the Azure 'chinanorth2' region. */ + RegionalAuthority["ChinaNorth2"] = "chinanorth2"; + /** Uses the {@link RegionalAuthority} for the Azure 'chinaeast2' region. */ + RegionalAuthority["ChinaEast2"] = "chinaeast2"; + /** Uses the {@link RegionalAuthority} for the Azure 'germanycentral' region. */ + RegionalAuthority["GermanyCentral"] = "germanycentral"; + /** Uses the {@link RegionalAuthority} for the Azure 'germanynortheast' region. */ + RegionalAuthority["GermanyNorthEast"] = "germanynortheast"; + /** Uses the {@link RegionalAuthority} for the Azure 'usgovvirginia' region. */ + RegionalAuthority["GovernmentUSVirginia"] = "usgovvirginia"; + /** Uses the {@link RegionalAuthority} for the Azure 'usgoviowa' region. */ + RegionalAuthority["GovernmentUSIowa"] = "usgoviowa"; + /** Uses the {@link RegionalAuthority} for the Azure 'usgovarizona' region. */ + RegionalAuthority["GovernmentUSArizona"] = "usgovarizona"; + /** Uses the {@link RegionalAuthority} for the Azure 'usgovtexas' region. */ + RegionalAuthority["GovernmentUSTexas"] = "usgovtexas"; + /** Uses the {@link RegionalAuthority} for the Azure 'usdodeast' region. */ + RegionalAuthority["GovernmentUSDodEast"] = "usdodeast"; + /** Uses the {@link RegionalAuthority} for the Azure 'usdodcentral' region. */ + RegionalAuthority["GovernmentUSDodCentral"] = "usdodcentral"; +})(RegionalAuthority || (RegionalAuthority = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The current persistence provider, undefined by default. + * @internal + */ +let persistenceProvider = undefined; +/** + * An object that allows setting the persistence provider. 
+ * @internal + */ +const msalNodeFlowCacheControl = { + setPersistence(pluginProvider) { + persistenceProvider = pluginProvider; + }, +}; +/** + * The current native broker provider, undefined by default. + * @internal + */ +let nativeBrokerInfo = undefined; +function hasNativeBroker() { + return nativeBrokerInfo !== undefined; +} +/** + * An object that allows setting the native broker provider. + * @internal + */ +const msalNodeFlowNativeBrokerControl = { + setNativeBroker(broker) { + nativeBrokerInfo = { + broker, + }; + }, +}; +/** + * MSAL partial base client for Node.js. * - * For operations to create, read all, or query Stored Procedures, + * It completes the input configuration with some default values. + * It also provides with utility protected methods that can be used from any of the clients, + * which includes handlers for successful responses and errors. + * + * @internal */ -class StoredProcedure { - /** - * Returns a reference URL to the resource. Used for linking in Permissions. - */ - get url() { - return createStoredProcedureUri(this.container.database.id, this.container.id, this.id); - } - /** - * Creates a new instance of {@link StoredProcedure} linked to the parent {@link Container}. - * @param container - The parent {@link Container}. - * @param id - The id of the given {@link StoredProcedure}. - * @hidden - */ - constructor(container, id, clientContext) { - this.container = container; - this.id = id; - this.clientContext = clientContext; +class MsalNode extends MsalBaseUtilities { + constructor(options) { + var _a, _b, _c, _d, _e, _f, _g; + super(options); + this.app = {}; + this.caeApp = {}; + this.requiresConfidential = false; + this.msalConfig = this.defaultNodeMsalConfig(options); + this.tenantId = resolveTenantId(options.logger, options.tenantId, options.clientId); + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds((_a = options === null || options === void 0 ? void 0 : options.tokenCredentialOptions) === null || _a === void 0 ? void 0 : _a.additionallyAllowedTenants); + this.clientId = this.msalConfig.auth.clientId; + if (options === null || options === void 0 ? void 0 : options.getAssertion) { + this.getAssertion = options.getAssertion; + } + this.enableBroker = (_b = options === null || options === void 0 ? void 0 : options.brokerOptions) === null || _b === void 0 ? void 0 : _b.enabled; + this.enableMsaPassthrough = (_c = options === null || options === void 0 ? void 0 : options.brokerOptions) === null || _c === void 0 ? void 0 : _c.legacyEnableMsaPassthrough; + this.parentWindowHandle = (_d = options.brokerOptions) === null || _d === void 0 ? void 0 : _d.parentWindowHandle; + // If persistence has been configured + if (persistenceProvider !== undefined && ((_e = options.tokenCachePersistenceOptions) === null || _e === void 0 ? void 0 : _e.enabled)) { + const nonCaeOptions = Object.assign({ name: `${options.tokenCachePersistenceOptions.name}.${CACHE_NON_CAE_SUFFIX}` }, options.tokenCachePersistenceOptions); + const caeOptions = Object.assign({ name: `${options.tokenCachePersistenceOptions.name}.${CACHE_CAE_SUFFIX}` }, options.tokenCachePersistenceOptions); + this.createCachePlugin = () => persistenceProvider(nonCaeOptions); + this.createCachePluginCae = () => persistenceProvider(caeOptions); + } + else if ((_f = options.tokenCachePersistenceOptions) === null || _f === void 0 ? 
void 0 : _f.enabled) { + throw new Error([ + "Persistent token caching was requested, but no persistence provider was configured.", + "You must install the identity-cache-persistence plugin package (`npm install --save @azure/identity-cache-persistence`)", + "and enable it by importing `useIdentityPlugin` from `@azure/identity` and calling", + "`useIdentityPlugin(cachePersistencePlugin)` before using `tokenCachePersistenceOptions`.", + ].join(" ")); + } + // If broker has not been configured + if (!hasNativeBroker() && this.enableBroker) { + throw new Error([ + "Broker for WAM was requested to be enabled, but no native broker was configured.", + "You must install the identity-broker plugin package (`npm install --save @azure/identity-broker`)", + "and enable it by importing `useIdentityPlugin` from `@azure/identity` and calling", + "`useIdentityPlugin(createNativeBrokerPlugin())` before using `enableBroker`.", + ].join(" ")); + } + this.azureRegion = (_g = options.regionalAuthority) !== null && _g !== void 0 ? _g : process.env.AZURE_REGIONAL_AUTHORITY_NAME; + if (this.azureRegion === RegionalAuthority.AutoDiscoverRegion) { + this.azureRegion = "AUTO_DISCOVER"; + } } /** - * Read the {@link StoredProcedureDefinition} for the given {@link StoredProcedure}. + * Generates a MSAL configuration that generally works for Node.js */ - async read(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.read({ - path, - resourceType: exports.ResourceType.sproc, - resourceId: id, - options, - diagnosticNode, - }); - return new StoredProcedureResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + defaultNodeMsalConfig(options) { + var _a; + const clientId = options.clientId || DeveloperSignOnClientId; + const tenantId = resolveTenantId(options.logger, options.tenantId, options.clientId); + this.authorityHost = options.authorityHost || process.env.AZURE_AUTHORITY_HOST; + const authority = getAuthority(tenantId, this.authorityHost); + this.identityClient = new IdentityClient(Object.assign(Object.assign({}, options.tokenCredentialOptions), { authorityHost: authority, loggingOptions: options.loggingOptions })); + const clientCapabilities = []; + return { + auth: { + clientId, + authority, + knownAuthorities: getKnownAuthorities(tenantId, authority, options.disableInstanceDiscovery), + clientCapabilities, + }, + // Cache is defined in this.prepare(); + system: { + networkClient: this.identityClient, + loggerOptions: { + loggerCallback: defaultLoggerCallback(options.logger), + logLevel: getMSALLogLevel(logger$o.getLogLevel()), + piiLoggingEnabled: (_a = options.loggingOptions) === null || _a === void 0 ? void 0 : _a.enableUnsafeSupportLogging, + }, + }, + }; } - /** - * Replace the given {@link StoredProcedure} with the specified {@link StoredProcedureDefinition}. - * @param body - The specified {@link StoredProcedureDefinition} to replace the existing definition. 
- */ - async replace(body, options) { - return withDiagnostics(async (diagnosticNode) => { - if (body.body) { - body.body = body.body.toString(); - } - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.replace({ - body, - path, - resourceType: exports.ResourceType.sproc, - resourceId: id, - options, - diagnosticNode, - }); - return new StoredProcedureResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + getApp(appType, enableCae) { + const app = enableCae ? this.caeApp : this.app; + if (appType === "publicFirst") { + return (app.public || app.confidential); + } + else if (appType === "confidentialFirst") { + return (app.confidential || app.public); + } + else if (appType === "confidential") { + return app.confidential; + } + else { + return app.public; + } } /** - * Delete the given {@link StoredProcedure}. + * Prepares the MSAL applications. */ - async delete(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.sproc, - resourceId: id, - options, - diagnosticNode, + async init(options) { + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", () => { + // This will abort any pending request in the IdentityClient, + // based on the received or generated correlationId + this.identityClient.abortRequests(options.correlationId); }); - return new StoredProcedureResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } - /** - * Execute the given {@link StoredProcedure}. - * - * The specified type, T, is not enforced by the client. - * Be sure to validate the response from the stored procedure matches the type, T, you provide. - * - * @param partitionKey - The partition key to use when executing the stored procedure - * @param params - Array of parameters to pass as arguments to the given {@link StoredProcedure}. - * @param options - Additional options, such as the partition key to invoke the {@link StoredProcedure} on. - */ - async execute(partitionKey, params, options) { - return withDiagnostics(async (diagnosticNode) => { - if (partitionKey === undefined) { - const partitionKeyResponse = await readPartitionKeyDefinition(diagnosticNode, this.container); - partitionKey = undefinedPartitionKey(partitionKeyResponse); + } + const app = (options === null || options === void 0 ? void 0 : options.enableCae) ? this.caeApp : this.app; + if (options === null || options === void 0 ? void 0 : options.enableCae) { + this.msalConfig.auth.clientCapabilities = ["cp1"]; + } + if (app.public || app.confidential) { + return; + } + if ((options === null || options === void 0 ? 
void 0 : options.enableCae) && this.createCachePluginCae !== undefined) { + this.msalConfig.cache = { + cachePlugin: await this.createCachePluginCae(), + }; + } + if (this.createCachePlugin !== undefined) { + this.msalConfig.cache = { + cachePlugin: await this.createCachePlugin(), + }; + } + if (hasNativeBroker() && this.enableBroker) { + this.msalConfig.broker = { + nativeBrokerPlugin: nativeBrokerInfo.broker, + }; + if (!this.parentWindowHandle) { + // error should have been thrown from within the constructor of InteractiveBrowserCredential + this.logger.warning("Parent window handle is not specified for the broker. This may cause unexpected behavior. Please provide the parentWindowHandle."); } - const response = await this.clientContext.execute({ - sprocLink: this.url, - params, - options, - partitionKey, - diagnosticNode, - }); - return new ResourceResponse(response.result, response.headers, response.code, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } -} - -/** - * Operations for creating, upserting, or reading/querying all Stored Procedures. - * - * For operations to read, replace, delete, or execute a specific, existing stored procedure by id, see `container.storedProcedure()`. - */ -class StoredProcedures { - /** - * @param container - The parent {@link Container}. - * @hidden - */ - constructor(container, clientContext) { - this.container = container; - this.clientContext = clientContext; - } - query(query, options) { - const path = getPathFromLink(this.container.url, exports.ResourceType.sproc); - const id = getIdFromLink(this.container.url); - return new QueryIterator(this.clientContext, query, options, (diagNode, innerOptions) => { - return this.clientContext.queryFeed({ - path, - resourceType: exports.ResourceType.sproc, - resourceId: id, - resultFn: (result) => result.StoredProcedures, - query, - options: innerOptions, - diagnosticNode: diagNode, - }); - }); - } - /** - * Read all stored procedures. - * @example Read all stored procedures to array. - * ```typescript - * const {body: sprocList} = await containers.storedProcedures.readAll().fetchAll(); - * ``` - */ - readAll(options) { - return this.query(undefined, options); - } - /** - * Create a StoredProcedure. - * - * Azure Cosmos DB allows stored procedures to be executed in the storage tier, - * directly against an item container. The script - * gets executed under ACID transactions on the primary storage partition of the - * specified container. For additional details, - * refer to the server-side JavaScript API documentation. - */ - async create(body, options) { - return withDiagnostics(async (diagnosticNode) => { - if (body.body) { - body.body = body.body.toString(); + } + if (options === null || options === void 0 ? void 0 : options.enableCae) { + this.caeApp.public = new msalCommon__namespace.PublicClientApplication(this.msalConfig); + } + else { + this.app.public = new msalCommon__namespace.PublicClientApplication(this.msalConfig); + } + if (this.getAssertion) { + this.msalConfig.auth.clientAssertion = await this.getAssertion(); + } + // The confidential client requires either a secret, assertion or certificate. + if (this.msalConfig.auth.clientSecret || + this.msalConfig.auth.clientAssertion || + this.msalConfig.auth.clientCertificate) { + if (options === null || options === void 0 ? 
void 0 : options.enableCae) { + this.caeApp.confidential = new msalCommon__namespace.ConfidentialClientApplication(this.msalConfig); } - const err = {}; - if (!isResourceValid(body, err)) { - throw err; + else { + this.app.confidential = new msalCommon__namespace.ConfidentialClientApplication(this.msalConfig); } - const path = getPathFromLink(this.container.url, exports.ResourceType.sproc); - const id = getIdFromLink(this.container.url); - const response = await this.clientContext.create({ - body, - path, - resourceType: exports.ResourceType.sproc, - resourceId: id, - options, - diagnosticNode, - }); - const ref = new StoredProcedure(this.container, response.result.id, this.clientContext); - return new StoredProcedureResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } -} - -class TriggerResponse extends ResourceResponse { - constructor(resource, headers, statusCode, trigger, diagnostics) { - super(resource, headers, statusCode, diagnostics); - this.trigger = trigger; + } + else { + if (this.requiresConfidential) { + throw new Error("Unable to generate the MSAL confidential client. Missing either the client's secret, certificate or assertion."); + } + } } -} - -/** - * Operations to read, replace, or delete a {@link Trigger}. - * - * Use `container.triggers` to create, upsert, query, or read all. - */ -class Trigger { /** - * Returns a reference URL to the resource. Used for linking in Permissions. + * Allows the cancellation of a MSAL request. */ - get url() { - return createTriggerUri(this.container.database.id, this.container.id, this.id); + withCancellation(promise, abortSignal, onCancel) { + return new Promise((resolve, reject) => { + promise + .then((msalToken) => { + return resolve(msalToken); + }) + .catch(reject); + if (abortSignal) { + abortSignal.addEventListener("abort", () => { + onCancel === null || onCancel === void 0 ? void 0 : onCancel(); + }); + } + }); } /** - * @hidden - * @param container - The parent {@link Container}. - * @param id - The id of the given {@link Trigger}. + * Returns the existing account, attempts to load the account from MSAL. */ - constructor(container, id, clientContext) { - this.container = container; - this.id = id; - this.clientContext = clientContext; + async getActiveAccount(enableCae = false) { + if (this.account) { + return this.account; + } + const cache = this.getApp("confidentialFirst", enableCae).getTokenCache(); + const accountsByTenant = await (cache === null || cache === void 0 ? void 0 : cache.getAllAccounts()); + if (!accountsByTenant) { + return; + } + if (accountsByTenant.length === 1) { + this.account = msalToPublic(this.clientId, accountsByTenant[0]); + } + else { + this.logger + .info(`More than one account was found authenticated for this Client ID and Tenant ID. +However, no "authenticationRecord" has been provided for this credential, +therefore we're unable to pick between these accounts. +A new login attempt will be requested, to ensure the correct account is picked. +To work with multiple accounts for the same Client ID and Tenant ID, please provide an "authenticationRecord" when initializing a credential to prevent this from happening.`); + return; + } + return this.account; } /** - * Read the {@link TriggerDefinition} for the given {@link Trigger}. + * Attempts to retrieve a token from cache. 
*/ - async read(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.read({ - path, - resourceType: exports.ResourceType.trigger, - resourceId: id, - options, - diagnosticNode, + async getTokenSilent(scopes, options) { + var _a, _b, _c; + await this.getActiveAccount(options === null || options === void 0 ? void 0 : options.enableCae); + if (!this.account) { + throw new AuthenticationRequiredError({ + scopes, + getTokenOptions: options, + message: "Silent authentication failed. We couldn't retrieve an active account from the cache.", }); - return new TriggerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } - /** - * Replace the given {@link Trigger} with the specified {@link TriggerDefinition}. - * @param body - The specified {@link TriggerDefinition} to replace the existing definition with. - */ - async replace(body, options) { - return withDiagnostics(async (diagnosticNode) => { - if (body.body) { - body.body = body.body.toString(); + } + const silentRequest = { + // To be able to re-use the account, the Token Cache must also have been provided. + account: publicToMsal(this.account), + correlationId: options === null || options === void 0 ? void 0 : options.correlationId, + scopes, + authority: options === null || options === void 0 ? void 0 : options.authority, + claims: options === null || options === void 0 ? void 0 : options.claims, + }; + if (hasNativeBroker() && this.enableBroker) { + if (!silentRequest.tokenQueryParameters) { + silentRequest.tokenQueryParameters = {}; } - const err = {}; - if (!isResourceValid(body, err)) { - throw err; + if (!this.parentWindowHandle) { + // error should have been thrown from within the constructor of InteractiveBrowserCredential + this.logger.warning("Parent window handle is not specified for the broker. This may cause unexpected behavior. Please provide the parentWindowHandle."); } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.replace({ - body, - path, - resourceType: exports.ResourceType.trigger, - resourceId: id, - options, - diagnosticNode, - }); - return new TriggerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } - /** - * Delete the given {@link Trigger}. - */ - async delete(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.trigger, - resourceId: id, - options, - diagnosticNode, - }); - return new TriggerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } -} - -/** - * Operations to create, upsert, query, and read all triggers. - * - * Use `container.triggers` to read, replace, or delete a {@link Trigger}. - */ -class Triggers { - /** - * @hidden - * @param container - The parent {@link Container}. 
- */ - constructor(container, clientContext) { - this.container = container; - this.clientContext = clientContext; - } - query(query, options) { - const path = getPathFromLink(this.container.url, exports.ResourceType.trigger); - const id = getIdFromLink(this.container.url); - return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { - return this.clientContext.queryFeed({ - path, - resourceType: exports.ResourceType.trigger, - resourceId: id, - resultFn: (result) => result.Triggers, - query, - options: innerOptions, - diagnosticNode, - }); - }); - } - /** - * Read all Triggers. - * @example Read all trigger to array. - * ```typescript - * const {body: triggerList} = await container.triggers.readAll().fetchAll(); - * ``` - */ - readAll(options) { - return this.query(undefined, options); + if (this.enableMsaPassthrough) { + silentRequest.tokenQueryParameters["msal_request_type"] = "consumer_passthrough"; + } + } + try { + this.logger.info("Attempting to acquire token silently"); + /** + * The following code to retrieve all accounts is done as a workaround in an attempt to force the + * refresh of the token cache with the token and the account passed in through the + * `authenticationRecord` parameter. See issue - https://github.com/Azure/azure-sdk-for-js/issues/24349#issuecomment-1496715651 + * This workaround serves as a workaround for silent authentication not happening when authenticationRecord is passed. + */ + await ((_a = this.getApp("publicFirst", options === null || options === void 0 ? void 0 : options.enableCae)) === null || _a === void 0 ? void 0 : _a.getTokenCache().getAllAccounts()); + const response = (_c = (await ((_b = this.getApp("confidential", options === null || options === void 0 ? void 0 : options.enableCae)) === null || _b === void 0 ? void 0 : _b.acquireTokenSilent(silentRequest)))) !== null && _c !== void 0 ? _c : (await this.getApp("public", options === null || options === void 0 ? void 0 : options.enableCae).acquireTokenSilent(silentRequest)); + return this.handleResult(scopes, this.clientId, response || undefined); + } + catch (err) { + throw this.handleError(scopes, err, options); + } } /** - * Create a trigger. - * - * Azure Cosmos DB supports pre and post triggers defined in JavaScript to be executed - * on creates, updates and deletes. - * - * For additional details, refer to the server-side JavaScript API documentation. + * Wrapper around each MSAL flow get token operation: doGetToken. + * If disableAutomaticAuthentication is sent through the constructor, it will prevent MSAL from requesting the user input. */ - async create(body, options) { - return withDiagnostics(async (diagnosticNode) => { - if (body.body) { - body.body = body.body.toString(); + async getToken(scopes, options = {}) { + const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds) || + this.tenantId; + options.authority = getAuthority(tenantId, this.authorityHost); + options.correlationId = (options === null || options === void 0 ? 
void 0 : options.correlationId) || this.generateUuid(); + await this.init(options); + try { + // MSAL now caches tokens based on their claims, + // so now one has to keep track fo claims in order to retrieve the newer tokens from acquireTokenSilent + // This update happened on PR: https://github.com/AzureAD/microsoft-authentication-library-for-js/pull/4533 + const optionsClaims = options.claims; + if (optionsClaims) { + this.cachedClaims = optionsClaims; } - const err = {}; - if (!isResourceValid(body, err)) { + if (this.cachedClaims && !optionsClaims) { + options.claims = this.cachedClaims; + } + // We don't return the promise since we want to catch errors right here. + return await this.getTokenSilent(scopes, options); + } + catch (err) { + if (err.name !== "AuthenticationRequiredError") { throw err; } - const path = getPathFromLink(this.container.url, exports.ResourceType.trigger); - const id = getIdFromLink(this.container.url); - const response = await this.clientContext.create({ - body, - path, - resourceType: exports.ResourceType.trigger, - resourceId: id, - options, - diagnosticNode, - }); - const ref = new Trigger(this.container, response.result.id, this.clientContext); - return new TriggerResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); - }, this.clientContext); + if (options === null || options === void 0 ? void 0 : options.disableAutomaticAuthentication) { + throw new AuthenticationRequiredError({ + scopes, + getTokenOptions: options, + message: "Automatic authentication has been disabled. You may call the authentication() method.", + }); + } + this.logger.info(`Silent authentication failed, falling back to interactive method.`); + return this.doGetToken(scopes, options); + } } } -class UserDefinedFunctionResponse extends ResourceResponse { - constructor(resource, headers, statusCode, udf, diagnostics) { - super(resource, headers, statusCode, diagnostics); - this.userDefinedFunction = udf; - } - /** - * Alias for `userDefinedFunction(id)`. - * - * A reference to the {@link UserDefinedFunction} corresponding to the returned {@link UserDefinedFunctionDefinition}. - */ - get udf() { - return this.userDefinedFunction; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const CommonTenantId = "common"; +const AzureAccountClientId = "aebc6443-996d-45c2-90f0-388ff96faa56"; // VSC: 'aebc6443-996d-45c2-90f0-388ff96faa56' +const logger$m = credentialLogger("VisualStudioCodeCredential"); +let findCredentials = undefined; +const vsCodeCredentialControl = { + setVsCodeCredentialFinder(finder) { + findCredentials = finder; + }, +}; +// Map of unsupported Tenant IDs and the errors we will be throwing. +const unsupportedTenantIds = { + adfs: "The VisualStudioCodeCredential does not support authentication with ADFS tenants.", +}; +function checkUnsupportedTenant(tenantId) { + // If the Tenant ID isn't supported, we throw. + const unsupportedTenantError = unsupportedTenantIds[tenantId]; + if (unsupportedTenantError) { + throw new CredentialUnavailableError(unsupportedTenantError); } } - +const mapVSCodeAuthorityHosts = { + AzureCloud: exports.AzureAuthorityHosts.AzurePublicCloud, + AzureChina: exports.AzureAuthorityHosts.AzureChina, + AzureGermanCloud: exports.AzureAuthorityHosts.AzureGermany, + AzureUSGovernment: exports.AzureAuthorityHosts.AzureGovernment, +}; /** - * Used to read, replace, or delete a specified User Definied Function by id. 
- * - * @see {@link UserDefinedFunction} to create, upsert, query, read all User Defined Functions. + * Attempts to load a specific property from the VSCode configurations of the current OS. + * If it fails at any point, returns undefined. */ -class UserDefinedFunction { - /** - * Returns a reference URL to the resource. Used for linking in Permissions. - */ - get url() { - return createUserDefinedFunctionUri(this.container.database.id, this.container.id, this.id); - } - /** - * @hidden - * @param container - The parent {@link Container}. - * @param id - The id of the given {@link UserDefinedFunction}. - */ - constructor(container, id, clientContext) { - this.container = container; - this.id = id; - this.clientContext = clientContext; - } - /** - * Read the {@link UserDefinedFunctionDefinition} for the given {@link UserDefinedFunction}. - */ - async read(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.read({ - path, - resourceType: exports.ResourceType.udf, - resourceId: id, - options, - diagnosticNode, - }); - return new UserDefinedFunctionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); +function getPropertyFromVSCode(property) { + const settingsPath = ["User", "settings.json"]; + // Eventually we can add more folders for more versions of VSCode. + const vsCodeFolder = "Code"; + const homedir = os.homedir(); + function loadProperty(...pathSegments) { + const fullPath = path.join(...pathSegments, vsCodeFolder, ...settingsPath); + const settings = JSON.parse(fs.readFileSync(fullPath, { encoding: "utf8" })); + return settings[property]; } - /** - * Replace the given {@link UserDefinedFunction} with the specified {@link UserDefinedFunctionDefinition}. - * @param options - - */ - async replace(body, options) { - return withDiagnostics(async (diagnosticNode) => { - if (body.body) { - body.body = body.body.toString(); - } - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.replace({ - body, - path, - resourceType: exports.ResourceType.udf, - resourceId: id, - options, - diagnosticNode, - }); - return new UserDefinedFunctionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + try { + let appData; + switch (process.platform) { + case "win32": + appData = process.env.APPDATA; + return appData ? loadProperty(appData) : undefined; + case "darwin": + return loadProperty(homedir, "Library", "Application Support"); + case "linux": + return loadProperty(homedir, ".config"); + default: + return; + } } - /** - * Delete the given {@link UserDefined}. - */ - async delete(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.udf, - resourceId: id, - options, - diagnosticNode, - }); - return new UserDefinedFunctionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + catch (e) { + logger$m.info(`Failed to load the Visual Studio Code configuration file. 
Error: ${e.message}`); + return; } } - /** - * Used to create, upsert, query, or read all User Defined Functions. + * Connects to Azure using the credential provided by the VSCode extension 'Azure Account'. + * Once the user has logged in via the extension, this credential can share the same refresh token + * that is cached by the extension. * - * @see {@link UserDefinedFunction} to read, replace, or delete a given User Defined Function by id. + * It's a [known issue](https://github.com/Azure/azure-sdk-for-js/issues/20500) that this credential doesn't + * work with [Azure Account extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode.azure-account) + * versions newer than **0.9.11**. A long-term fix to this problem is in progress. In the meantime, consider + * authenticating with {@link AzureCliCredential}. */ -class UserDefinedFunctions { - /** - * @hidden - * @param container - The parent {@link Container}. - */ - constructor(container, clientContext) { - this.container = container; - this.clientContext = clientContext; - } - query(query, options) { - const path = getPathFromLink(this.container.url, exports.ResourceType.udf); - const id = getIdFromLink(this.container.url); - return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { - return this.clientContext.queryFeed({ - path, - resourceType: exports.ResourceType.udf, - resourceId: id, - resultFn: (result) => result.UserDefinedFunctions, - query, - options: innerOptions, - diagnosticNode, - }); - }); - } - /** - * Read all User Defined Functions. - * @example Read all User Defined Functions to array. - * ```typescript - * const {body: udfList} = await container.userDefinedFunctions.readAll().fetchAll(); - * ``` - */ - readAll(options) { - return this.query(undefined, options); - } - /** - * Create a UserDefinedFunction. - * - * Azure Cosmos DB supports JavaScript UDFs which can be used inside queries, stored procedures and triggers. - * - * For additional details, refer to the server-side JavaScript API documentation. - * - */ - async create(body, options) { - return withDiagnostics(async (diagnosticNode) => { - if (body.body) { - body.body = body.body.toString(); - } - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.container.url, exports.ResourceType.udf); - const id = getIdFromLink(this.container.url); - const response = await this.clientContext.create({ - body, - path, - resourceType: exports.ResourceType.udf, - resourceId: id, - options, - diagnosticNode, - }); - const ref = new UserDefinedFunction(this.container, response.result.id, this.clientContext); - return new UserDefinedFunctionResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } -} - -// Copyright (c) Microsoft Corporation. -class Scripts { - /** - * @param container - The parent {@link Container}. - * @hidden - */ - constructor(container, clientContext) { - this.container = container; - this.clientContext = clientContext; - } - /** - * Used to read, replace, or delete a specific, existing {@link StoredProcedure} by id. - * - * Use `.storedProcedures` for creating new stored procedures, or querying/reading all stored procedures. - * @param id - The id of the {@link StoredProcedure}. 
- */ - storedProcedure(id) { - return new StoredProcedure(this.container, id, this.clientContext); - } +class VisualStudioCodeCredential { /** - * Used to read, replace, or delete a specific, existing {@link Trigger} by id. + * Creates an instance of VisualStudioCodeCredential to use for automatically authenticating via VSCode. * - * Use `.triggers` for creating new triggers, or querying/reading all triggers. - * @param id - The id of the {@link Trigger}. - */ - trigger(id) { - return new Trigger(this.container, id, this.clientContext); - } - /** - * Used to read, replace, or delete a specific, existing {@link UserDefinedFunction} by id. + * **Note**: `VisualStudioCodeCredential` is provided by a plugin package: + * `@azure/identity-vscode`. If this package is not installed and registered + * using the plugin API (`useIdentityPlugin`), then authentication using + * `VisualStudioCodeCredential` will not be available. * - * Use `.userDefinedFunctions` for creating new user defined functions, or querying/reading all user defined functions. - * @param id - The id of the {@link UserDefinedFunction}. + * @param options - Options for configuring the client which makes the authentication request. */ - userDefinedFunction(id) { - return new UserDefinedFunction(this.container, id, this.clientContext); + constructor(options) { + // We want to make sure we use the one assigned by the user on the VSCode settings. + // Or just `AzureCloud` by default. + this.cloudName = (getPropertyFromVSCode("azure.cloud") || "AzureCloud"); + // Picking an authority host based on the cloud name. + const authorityHost = mapVSCodeAuthorityHosts[this.cloudName]; + this.identityClient = new IdentityClient(Object.assign({ authorityHost }, options)); + if (options && options.tenantId) { + checkTenantId(logger$m, options.tenantId); + this.tenantId = options.tenantId; + } + else { + this.tenantId = CommonTenantId; + } + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + checkUnsupportedTenant(this.tenantId); } /** - * Operations for creating new stored procedures, and reading/querying all stored procedures. - * - * For reading, replacing, or deleting an existing stored procedure, use `.storedProcedure(id)`. + * Runs preparations for any further getToken request. */ - get storedProcedures() { - if (!this.$sprocs) { - this.$sprocs = new StoredProcedures(this.container, this.clientContext); + async prepare() { + // Attempts to load the tenant from the VSCode configuration file. + const settingsTenant = getPropertyFromVSCode("azure.tenant"); + if (settingsTenant) { + this.tenantId = settingsTenant; } - return this.$sprocs; + checkUnsupportedTenant(this.tenantId); } /** - * Operations for creating new triggers, and reading/querying all triggers. - * - * For reading, replacing, or deleting an existing trigger, use `.trigger(id)`. + * Runs preparations for any further getToken, but only once. */ - get triggers() { - if (!this.$triggers) { - this.$triggers = new Triggers(this.container, this.clientContext); + prepareOnce() { + if (!this.preparePromise) { + this.preparePromise = this.prepare(); } - return this.$triggers; + return this.preparePromise; } /** - * Operations for creating new user defined functions, and reading/querying all user defined functions. + * Returns the token found by searching VSCode's authentication cache or + * returns null if no token could be found. 
* - * For reading, replacing, or deleting an existing user defined function, use `.userDefinedFunction(id)`. + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * `TokenCredential` implementation might make. */ - get userDefinedFunctions() { - if (!this.$udfs) { - this.$udfs = new UserDefinedFunctions(this.container, this.clientContext); + async getToken(scopes, options) { + var _a, _b; + await this.prepareOnce(); + const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds, logger$m) || this.tenantId; + if (findCredentials === undefined) { + throw new CredentialUnavailableError([ + "No implementation of `VisualStudioCodeCredential` is available.", + "You must install the identity-vscode plugin package (`npm install --save-dev @azure/identity-vscode`)", + "and enable it by importing `useIdentityPlugin` from `@azure/identity` and calling", + "`useIdentityPlugin(vsCodePlugin)` before creating a `VisualStudioCodeCredential`.", + "To troubleshoot, visit https://aka.ms/azsdk/js/identity/vscodecredential/troubleshoot.", + ].join(" ")); + } + let scopeString = typeof scopes === "string" ? scopes : scopes.join(" "); + // Check to make sure the scope we get back is a valid scope + if (!scopeString.match(/^[0-9a-zA-Z-.:/]+$/)) { + const error = new Error("Invalid scope was specified by the user or calling client"); + logger$m.getToken.info(formatError(scopes, error)); + throw error; + } + if (scopeString.indexOf("offline_access") < 0) { + scopeString += " offline_access"; + } + // findCredentials returns an array similar to: + // [ + // { + // account: "", + // password: "", + // }, + // /* ... */ + // ] + const credentials = await findCredentials(); + // If we can't find the credential based on the name, we'll pick the first one available. + const { password: refreshToken } = (_b = (_a = credentials.find(({ account }) => account === this.cloudName)) !== null && _a !== void 0 ? _a : credentials[0]) !== null && _b !== void 0 ? _b : {}; + if (refreshToken) { + const tokenResponse = await this.identityClient.refreshAccessToken(tenantId, AzureAccountClientId, scopeString, refreshToken, undefined); + if (tokenResponse) { + logger$m.getToken.info(formatSuccess(scopes)); + return tokenResponse.accessToken; + } + else { + const error = new CredentialUnavailableError("Could not retrieve the token associated with Visual Studio Code. Have you connected using the 'Azure Account' extension recently? To troubleshoot, visit https://aka.ms/azsdk/js/identity/vscodecredential/troubleshoot."); + logger$m.getToken.info(formatError(scopes, error)); + throw error; + } + } + else { + const error = new CredentialUnavailableError("Could not retrieve the token associated with Visual Studio Code. Did you connect using the 'Azure Account' extension? 
To troubleshoot, visit https://aka.ms/azsdk/js/identity/vscodecredential/troubleshoot."); + logger$m.getToken.info(formatError(scopes, error)); + throw error; } - return this.$udfs; - } -} - -/** Response object for Container operations */ -class ContainerResponse extends ResourceResponse { - constructor(resource, headers, statusCode, container, diagnostics) { - super(resource, headers, statusCode, diagnostics); - this.container = container; } } -class OfferResponse extends ResourceResponse { - constructor(resource, headers, statusCode, diagnostics, offer) { - super(resource, headers, statusCode, diagnostics); - this.offer = offer; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The context passed to an Identity plugin. This contains objects that + * plugins can use to set backend implementations. + * @internal + */ +const pluginContext = { + cachePluginControl: msalNodeFlowCacheControl, + nativeBrokerPluginControl: msalNodeFlowNativeBrokerControl, + vsCodeCredentialControl: vsCodeCredentialControl, +}; +/** + * Extend Azure Identity with additional functionality. Pass a plugin from + * a plugin package, such as: + * + * - `@azure/identity-cache-persistence`: provides persistent token caching + * - `@azure/identity-vscode`: provides the dependencies of + * `VisualStudioCodeCredential` and enables it + * + * Example: + * + * ```javascript + * import { cachePersistencePlugin } from "@azure/identity-cache-persistence"; + * + * import { useIdentityPlugin, DefaultAzureCredential } from "@azure/identity"; + * useIdentityPlugin(cachePersistencePlugin); + * + * // The plugin has the capability to extend `DefaultAzureCredential` and to + * // add middleware to the underlying credentials, such as persistence. + * const credential = new DefaultAzureCredential({ + * tokenCachePersistenceOptions: { + * enabled: true + * } + * }); + * ``` + * + * @param plugin - the plugin to register + */ +function useIdentityPlugin(plugin) { + plugin(pluginContext); } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const msiName$6 = "ManagedIdentityCredential - AppServiceMSI 2017"; +const logger$l = credentialLogger(msiName$6); /** - * Use to read or replace an existing {@link Offer} by id. - * - * @see {@link Offers} to query or read all offers. + * Generates the options used on the request for an access token. */ -class Offer { - /** - * Returns a reference URL to the resource. Used for linking in Permissions. - */ - get url() { - return `/${Constants$1.Path.OffersPathSegment}/${this.id}`; +function prepareRequestOptions$5(scopes, clientId) { + const resource = mapScopesToResource(scopes); + if (!resource) { + throw new Error(`${msiName$6}: Multiple scopes are not supported.`); } - /** - * @hidden - * @param client - The parent {@link CosmosClient} for the Database Account. - * @param id - The id of the given {@link Offer}. - */ - constructor(client, id, clientContext) { - this.client = client; - this.id = id; - this.clientContext = clientContext; + const queryParameters = { + resource, + "api-version": "2017-09-01", + }; + if (clientId) { + queryParameters.clientid = clientId; } - /** - * Read the {@link OfferDefinition} for the given {@link Offer}. 
- */ - async read(options) { - return withDiagnostics(async (diagnosticNode) => { - const response = await this.clientContext.read({ - path: this.url, - resourceType: exports.ResourceType.offer, - resourceId: this.id, - options, - diagnosticNode, - }); - return new OfferResponse(response.result, response.headers, response.code, getEmptyCosmosDiagnostics(), this); - }, this.clientContext); + const query = new URLSearchParams(queryParameters); + // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. + if (!process.env.MSI_ENDPOINT) { + throw new Error(`${msiName$6}: Missing environment variable: MSI_ENDPOINT`); } - /** - * Replace the given {@link Offer} with the specified {@link OfferDefinition}. - * @param body - The specified {@link OfferDefinition} - */ - async replace(body, options) { - return withDiagnostics(async (diagnosticNode) => { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const response = await this.clientContext.replace({ - body, - path: this.url, - resourceType: exports.ResourceType.offer, - resourceId: this.id, - options, - diagnosticNode, - }); - return new OfferResponse(response.result, response.headers, response.code, getEmptyCosmosDiagnostics(), this); - }, this.clientContext); + if (!process.env.MSI_SECRET) { + throw new Error(`${msiName$6}: Missing environment variable: MSI_SECRET`); } + return { + url: `${process.env.MSI_ENDPOINT}?${query.toString()}`, + method: "GET", + headers: coreRestPipeline.createHttpHeaders({ + Accept: "application/json", + secret: process.env.MSI_SECRET, + }), + }; } +/** + * Defines how to determine whether the Azure App Service MSI is available, and also how to retrieve a token from the Azure App Service MSI. + */ +const appServiceMsi2017 = { + name: "appServiceMsi2017", + async isAvailable({ scopes }) { + const resource = mapScopesToResource(scopes); + if (!resource) { + logger$l.info(`${msiName$6}: Unavailable. Multiple scopes are not supported.`); + return false; + } + const env = process.env; + const result = Boolean(env.MSI_ENDPOINT && env.MSI_SECRET); + if (!result) { + logger$l.info(`${msiName$6}: Unavailable. The environment variables needed are: MSI_ENDPOINT and MSI_SECRET.`); + } + return result; + }, + async getToken(configuration, getTokenOptions = {}) { + const { identityClient, scopes, clientId, resourceId } = configuration; + if (resourceId) { + logger$l.warning(`${msiName$6}: managed Identity by resource Id is not supported. Argument resourceId might be ignored by the service.`); + } + logger$l.info(`${msiName$6}: Using the endpoint and the secret coming form the environment variables: MSI_ENDPOINT=${process.env.MSI_ENDPOINT} and MSI_SECRET=[REDACTED].`); + const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$5(scopes, clientId)), { + // Generally, MSI endpoints use the HTTP protocol, without transport layer security (TLS). + allowInsecureConnection: true })); + const tokenResponse = await identityClient.sendTokenRequest(request); + return (tokenResponse && tokenResponse.accessToken) || null; + }, +}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const msiName$5 = "ManagedIdentityCredential - CloudShellMSI"; +const logger$k = credentialLogger(msiName$5); /** - * Use to query or read all Offers. - * - * @see {@link Offer} to read or replace an existing {@link Offer} by id. 
+ * Generates the options used on the request for an access token. */ -class Offers { - /** - * @hidden - * @param client - The parent {@link CosmosClient} for the offers. - */ - constructor(client, clientContext) { - this.client = client; - this.clientContext = clientContext; +function prepareRequestOptions$4(scopes, clientId, resourceId) { + const resource = mapScopesToResource(scopes); + if (!resource) { + throw new Error(`${msiName$5}: Multiple scopes are not supported.`); } - query(query, options) { - return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { - return this.clientContext.queryFeed({ - path: "/offers", - resourceType: exports.ResourceType.offer, - resourceId: "", - resultFn: (result) => result.Offers, - query, - options: innerOptions, - diagnosticNode, - }); - }); + const body = { + resource, + }; + if (clientId) { + body.client_id = clientId; } - /** - * Read all offers. - * @example Read all offers to array. - * ```typescript - * const {body: offerList} = await client.offers.readAll().fetchAll(); - * ``` - */ - readAll(options) { - return this.query(undefined, options); + if (resourceId) { + body.msi_res_id = resourceId; + } + // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. + if (!process.env.MSI_ENDPOINT) { + throw new Error(`${msiName$5}: Missing environment variable: MSI_ENDPOINT`); } + const params = new URLSearchParams(body); + return { + url: process.env.MSI_ENDPOINT, + method: "POST", + body: params.toString(), + headers: coreRestPipeline.createHttpHeaders({ + Accept: "application/json", + Metadata: "true", + "Content-Type": "application/x-www-form-urlencoded", + }), + }; } - /** - * Operations for reading, replacing, or deleting a specific, existing container by id. - * - * @see {@link Containers} for creating new containers, and reading/querying all containers; use `.containers`. - * - * Note: all these operations make calls against a fixed budget. - * You should design your system such that these calls scale sublinearly with your application. - * For instance, do not call `container(id).read()` before every single `item.read()` call, to ensure the container exists; - * do this once on application start up. + * Defines how to determine whether the Azure Cloud Shell MSI is available, and also how to retrieve a token from the Azure Cloud Shell MSI. + * Since Azure Managed Identities aren't available in the Azure Cloud Shell, we log a warning for users that try to access cloud shell using user assigned identity. */ -class Container { - /** - * Operations for creating new items, and reading/querying all items - * - * For reading, replacing, or deleting an existing item, use `.item(id)`. - * - * @example Create a new item - * ```typescript - * const {body: createdItem} = await container.items.create({id: "", properties: {}}); - * ``` - */ - get items() { - if (!this.$items) { - this.$items = new Items(this, this.clientContext); - } - return this.$items; - } - /** - * All operations for Stored Procedures, Triggers, and User Defined Functions - */ - get scripts() { - if (!this.$scripts) { - this.$scripts = new Scripts(this, this.clientContext); +const cloudShellMsi = { + name: "cloudShellMsi", + async isAvailable({ scopes }) { + const resource = mapScopesToResource(scopes); + if (!resource) { + logger$k.info(`${msiName$5}: Unavailable. 
Multiple scopes are not supported.`); + return false; } - return this.$scripts; - } - /** - * Operations for reading and querying conflicts for the given container. - * - * For reading or deleting a specific conflict, use `.conflict(id)`. - */ - get conflicts() { - if (!this.$conflicts) { - this.$conflicts = new Conflicts(this, this.clientContext); + const result = Boolean(process.env.MSI_ENDPOINT); + if (!result) { + logger$k.info(`${msiName$5}: Unavailable. The environment variable MSI_ENDPOINT is needed.`); } - return this.$conflicts; - } - /** - * Returns a reference URL to the resource. Used for linking in Permissions. - */ - get url() { - return createDocumentCollectionUri(this.database.id, this.id); - } - /** - * Returns a container instance. Note: You should get this from `database.container(id)`, rather than creating your own object. - * @param database - The parent {@link Database}. - * @param id - The id of the given container. - * @hidden - */ - constructor(database, id, clientContext) { - this.database = database; - this.id = id; - this.clientContext = clientContext; - } - /** - * Used to read, replace, or delete a specific, existing {@link Item} by id. - * - * Use `.items` for creating new items, or querying/reading all items. - * - * @param id - The id of the {@link Item}. - * @param partitionKeyValue - The value of the {@link Item} partition key - * @example Replace an item - * `const {body: replacedItem} = await container.item("", "").replace({id: "", title: "Updated post", authorID: 5});` - */ - item(id, partitionKeyValue) { - return new Item(this, id, this.clientContext, partitionKeyValue); - } - /** - * Used to read, replace, or delete a specific, existing {@link Conflict} by id. - * - * Use `.conflicts` for creating new conflicts, or querying/reading all conflicts. - * @param id - The id of the {@link Conflict}. 
- */ - conflict(id, partitionKey) { - return new Conflict(this, id, this.clientContext, partitionKey); - } - /** Read the container's definition */ - async read(options) { - return withDiagnostics(async (diagnosticNode) => { - return this.readInternal(diagnosticNode, options); - }, this.clientContext); - } - /** - * @hidden - */ - async readInternal(diagnosticNode, options) { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.read({ - path, - resourceType: exports.ResourceType.container, - resourceId: id, - options, - diagnosticNode, - }); - this.clientContext.partitionKeyDefinitionCache[this.url] = response.result.partitionKey; - return new ContainerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - } - /** Replace the container's definition */ - async replace(body, options) { - return withDiagnostics(async (diagnosticNode) => { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.replace({ - body, - path, - resourceType: exports.ResourceType.container, - resourceId: id, - options, - diagnosticNode, - }); - return new ContainerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } - /** Delete the container */ - async delete(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.container, - resourceId: id, - options, - diagnosticNode, - }); - return new ContainerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } - /** - * Gets the partition key definition first by looking into the cache otherwise by reading the collection. - * @deprecated This method has been renamed to readPartitionKeyDefinition. - */ - async getPartitionKeyDefinition() { - return withDiagnostics(async (diagnosticNode) => { - return this.readPartitionKeyDefinition(diagnosticNode); - }, this.clientContext); - } - /** - * Gets the partition key definition first by looking into the cache otherwise by reading the collection. - * @hidden - */ - async readPartitionKeyDefinition(diagnosticNode) { - // $ISSUE-felixfan-2016-03-17: Make name based path and link based path use the same key - // $ISSUE-felixfan-2016-03-17: Refresh partitionKeyDefinitionCache when necessary - if (this.url in this.clientContext.partitionKeyDefinitionCache) { - diagnosticNode.addData({ readFromCache: true }); - return new ResourceResponse(this.clientContext.partitionKeyDefinitionCache[this.url], {}, 0, getEmptyCosmosDiagnostics()); + return result; + }, + async getToken(configuration, getTokenOptions = {}) { + const { identityClient, scopes, clientId, resourceId } = configuration; + if (clientId) { + logger$k.warning(`${msiName$5}: user-assigned identities not supported. 
The argument clientId might be ignored by the service.`); } - const { headers, statusCode, diagnostics } = await withMetadataDiagnostics(async (node) => { - return this.readInternal(node); - }, diagnosticNode, exports.MetadataLookUpType.ContainerLookUp); - return new ResourceResponse(this.clientContext.partitionKeyDefinitionCache[this.url], headers, statusCode, diagnostics); - } - /** - * Gets offer on container. If none exists, returns an OfferResponse with undefined. - */ - async readOffer(options = {}) { - return withDiagnostics(async (diagnosticNode) => { - const { resource: container } = await this.read(); - const path = "/offers"; - const url = container._self; - const response = await this.clientContext.queryFeed({ - path, - resourceId: "", - resourceType: exports.ResourceType.offer, - query: `SELECT * from root where root.resource = "${url}"`, - resultFn: (result) => result.Offers, - options, - diagnosticNode, - }); - const offer = response.result[0] - ? new Offer(this.database.client, response.result[0].id, this.clientContext) - : undefined; - return new OfferResponse(response.result[0], response.headers, response.code, getEmptyCosmosDiagnostics(), offer); - }, this.clientContext); - } - async getQueryPlan(query) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - return this.clientContext.getQueryPlan(path + "/docs", exports.ResourceType.item, getIdFromLink(this.url), query, {}, diagnosticNode); - }, this.clientContext); - } - readPartitionKeyRanges(feedOptions) { - feedOptions = feedOptions || {}; - return this.clientContext.queryPartitionKeyRanges(this.url, undefined, feedOptions); - } - /** - * - * @returns all the feed ranges for which changefeed could be fetched. - */ - async getFeedRanges() { - return withDiagnostics(async (diagnosticNode) => { - const { resources } = await this.readPartitionKeyRanges().fetchAllInternal(diagnosticNode); - const feedRanges = []; - for (const resource of resources) { - const feedRange = new FeedRangeInternal(resource.minInclusive, resource.maxExclusive); - Object.freeze(feedRange); - feedRanges.push(feedRange); - } - return feedRanges; - }, this.clientContext); - } - /** - * Delete all documents belong to the container for the provided partition key value - * @param partitionKey - The partition key value of the items to be deleted - */ - async deleteAllItemsForPartitionKey(partitionKey, options) { - return withDiagnostics(async (diagnosticNode) => { - let path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - path = path + "/operations/partitionkeydelete"; - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.container, - resourceId: id, - options, - partitionKey: partitionKey, - method: exports.HTTPMethod.post, - diagnosticNode, - }); - return new ContainerResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } -} + if (resourceId) { + logger$k.warning(`${msiName$5}: user defined managed Identity by resource Id not supported. 
The argument resourceId might be ignored by the service.`); + } + logger$k.info(`${msiName$5}: Using the endpoint coming form the environment variable MSI_ENDPOINT = ${process.env.MSI_ENDPOINT}.`); + const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$4(scopes, clientId, resourceId)), { + // Generally, MSI endpoints use the HTTP protocol, without transport layer security (TLS). + allowInsecureConnection: true })); + const tokenResponse = await identityClient.sendTokenRequest(request); + return (tokenResponse && tokenResponse.accessToken) || null; + }, +}; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -function validateOffer(body) { - if (body.throughput) { - if (body.maxThroughput) { - console.log("should be erroring"); - throw new Error("Cannot specify `throughput` with `maxThroughput`"); +const msiName$4 = "ManagedIdentityCredential - IMDS"; +const logger$j = credentialLogger(msiName$4); +/** + * Generates the options used on the request for an access token. + */ +function prepareRequestOptions$3(scopes, clientId, resourceId, options) { + var _a; + const resource = mapScopesToResource(scopes); + if (!resource) { + throw new Error(`${msiName$4}: Multiple scopes are not supported.`); + } + const { skipQuery, skipMetadataHeader } = options || {}; + let query = ""; + // Pod Identity will try to process this request even if the Metadata header is missing. + // We can exclude the request query to ensure no IMDS endpoint tries to process the ping request. + if (!skipQuery) { + const queryParameters = { + resource, + "api-version": imdsApiVersion, + }; + if (clientId) { + queryParameters.client_id = clientId; } - if (body.autoUpgradePolicy) { - throw new Error("Cannot specify autoUpgradePolicy with throughput. Use `maxThroughput` instead"); + if (resourceId) { + queryParameters.msi_res_id = resourceId; } + const params = new URLSearchParams(queryParameters); + query = `?${params.toString()}`; + } + const url = new URL(imdsEndpointPath, (_a = process.env.AZURE_POD_IDENTITY_AUTHORITY_HOST) !== null && _a !== void 0 ? _a : imdsHost); + const rawHeaders = { + Accept: "application/json", + Metadata: "true", + }; + // Remove the Metadata header to invoke a request error from some IMDS endpoints. + if (skipMetadataHeader) { + delete rawHeaders.Metadata; } + return { + // In this case, the `?` should be added in the "query" variable `skipQuery` is not set. + url: `${url}${query}`, + method: "GET", + headers: coreRestPipeline.createHttpHeaders(rawHeaders), + }; } - +// 800ms -> 1600ms -> 3200ms +const imdsMsiRetryConfig = { + maxRetries: 3, + startDelayInMs: 800, + intervalIncrement: 2, +}; /** - * Operations for creating new containers, and reading/querying all containers - * - * @see {@link Container} for reading, replacing, or deleting an existing container; use `.container(id)`. - * - * Note: all these operations make calls against a fixed budget. - * You should design your system such that these calls scale sublinearly with your application. - * For instance, do not call `containers.readAll()` before every single `item.read()` call, to ensure the container exists; - * do this once on application start up. + * Defines how to determine whether the Azure IMDS MSI is available, and also how to retrieve a token from the Azure IMDS MSI. 
*/ -class Containers { - constructor(database, clientContext) { - this.database = database; - this.clientContext = clientContext; - } - query(query, options) { - const path = getPathFromLink(this.database.url, exports.ResourceType.container); - const id = getIdFromLink(this.database.url); - return new QueryIterator(this.clientContext, query, options, (diagNode, innerOptions) => { - return this.clientContext.queryFeed({ - path, - resourceType: exports.ResourceType.container, - resourceId: id, - resultFn: (result) => result.DocumentCollections, - query, - options: innerOptions, - diagnosticNode: diagNode, - }); - }); - } - /** - * Creates a container. - * - * A container is a named logical container for items. - * - * A database may contain zero or more named containers and each container consists of - * zero or more JSON items. - * - * Being schema-free, the items in a container do not need to share the same structure or fields. - * - * - * Since containers are application resources, they can be authorized using either the - * master key or resource keys. - * - * @param body - Represents the body of the container. - * @param options - Use to set options like response page size, continuation tokens, etc. - */ - async create(body, options = {}) { - return withDiagnostics(async (diagnosticNode) => { - return this.createInternal(diagnosticNode, body, options); - }, this.clientContext); - } - /** - * @hidden - */ - async createInternal(diagnosticNode, body, options = {}) { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.database.url, exports.ResourceType.container); - const id = getIdFromLink(this.database.url); - validateOffer(body); - if (body.maxThroughput) { - const autoscaleParams = { - maxThroughput: body.maxThroughput, - }; - if (body.autoUpgradePolicy) { - autoscaleParams.autoUpgradePolicy = body.autoUpgradePolicy; - } - const autoscaleHeader = JSON.stringify(autoscaleParams); - options.initialHeaders = Object.assign({}, options.initialHeaders, { - [Constants$1.HttpHeaders.AutoscaleSettings]: autoscaleHeader, - }); - delete body.maxThroughput; - delete body.autoUpgradePolicy; - } - if (body.throughput) { - options.initialHeaders = Object.assign({}, options.initialHeaders, { - [Constants$1.HttpHeaders.OfferThroughput]: body.throughput, - }); - delete body.throughput; +const imdsMsi = { + name: "imdsMsi", + async isAvailable({ scopes, identityClient, clientId, resourceId, getTokenOptions = {}, }) { + const resource = mapScopesToResource(scopes); + if (!resource) { + logger$j.info(`${msiName$4}: Unavailable. 
Multiple scopes are not supported.`); + return false; } - if (typeof body.partitionKey === "string") { - if (!body.partitionKey.startsWith("/")) { - throw new Error("Partition key must start with '/'"); - } - body.partitionKey = { - paths: [body.partitionKey], - }; + // if the PodIdentityEndpoint environment variable was set no need to probe the endpoint, it can be assumed to exist + if (process.env.AZURE_POD_IDENTITY_AUTHORITY_HOST) { + return true; } - // If they don't specify a partition key, use the default path - if (!body.partitionKey || !body.partitionKey.paths) { - body.partitionKey = { - paths: [DEFAULT_PARTITION_KEY_PATH], - }; + if (!identityClient) { + throw new Error("Missing IdentityClient"); } - const response = await this.clientContext.create({ - body, - path, - resourceType: exports.ResourceType.container, - resourceId: id, - diagnosticNode, - options, + const requestOptions = prepareRequestOptions$3(resource, clientId, resourceId, { + skipMetadataHeader: true, + skipQuery: true, }); - const ref = new Container(this.database, response.result.id, this.clientContext); - return new ContainerResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); - } - /** - * Checks if a Container exists, and, if it doesn't, creates it. - * This will make a read operation based on the id in the `body`, then if it is not found, a create operation. - * You should confirm that the output matches the body you passed in for non-default properties (i.e. indexing policy/etc.) - * - * A container is a named logical container for items. - * - * A database may contain zero or more named containers and each container consists of - * zero or more JSON items. - * - * Being schema-free, the items in a container do not need to share the same structure or fields. - * - * - * Since containers are application resources, they can be authorized using either the - * master key or resource keys. - * - * @param body - Represents the body of the container. - * @param options - Use to set options like response page size, continuation tokens, etc. - */ - async createIfNotExists(body, options) { - if (!body || body.id === null || body.id === undefined) { - throw new Error("body parameter must be an object with an id property"); - } - /* - 1. Attempt to read the Container (based on an assumption that most containers will already exist, so its faster) - 2. If it fails with NotFound error, attempt to create the container. Else, return the read results. - */ - return withDiagnostics(async (diagnosticNode) => { + return tracingClient.withSpan("ManagedIdentityCredential-pingImdsEndpoint", getTokenOptions, async (options) => { + var _a, _b; + requestOptions.tracingOptions = options.tracingOptions; + // Create a request with a timeout since we expect that + // not having a "Metadata" header should cause an error to be + // returned quickly from the endpoint, proving its availability. + const request = coreRestPipeline.createPipelineRequest(requestOptions); + // Default to 1000 if the default of 0 is used. + // Negative values can still be used to disable the timeout. + request.timeout = ((_a = options.requestOptions) === null || _a === void 0 ? 
void 0 : _a.timeout) || 1000; + // This MSI uses the imdsEndpoint to get the token, which only uses http:// + request.allowInsecureConnection = true; + let response; try { - const readResponse = await this.database - .container(body.id) - .readInternal(diagnosticNode, options); - return readResponse; + logger$j.info(`${msiName$4}: Pinging the Azure IMDS endpoint`); + response = await identityClient.sendRequest(request); } catch (err) { - if (err.code === StatusCodes.NotFound) { - const createResponse = await this.createInternal(diagnosticNode, body, options); - // Must merge the headers to capture RU costskaty - mergeHeaders(createResponse.headers, err.headers); - return createResponse; + // If the request failed, or Node.js was unable to establish a connection, + // or the host was down, we'll assume the IMDS endpoint isn't available. + if (coreUtil.isError(err)) { + logger$j.verbose(`${msiName$4}: Caught error ${err.name}: ${err.message}`); } - else { - throw err; + // This is a special case for Docker Desktop which responds with a 403 with a message that contains "A socket operation was attempted to an unreachable network" + // rather than just timing out, as expected. + logger$j.info(`${msiName$4}: The Azure IMDS endpoint is unavailable`); + return false; + } + if (response.status === 403) { + if ((_b = response.bodyAsText) === null || _b === void 0 ? void 0 : _b.includes("A socket operation was attempted to an unreachable network")) { + logger$j.info(`${msiName$4}: The Azure IMDS endpoint is unavailable`); + logger$j.info(`${msiName$4}: ${response.bodyAsText}`); + return false; } } - }, this.clientContext); + // If we received any response, the endpoint is available + logger$j.info(`${msiName$4}: The Azure IMDS endpoint is available`); + return true; + }); + }, + async getToken(configuration, getTokenOptions = {}) { + const { identityClient, scopes, clientId, resourceId } = configuration; + if (process.env.AZURE_POD_IDENTITY_AUTHORITY_HOST) { + logger$j.info(`${msiName$4}: Using the Azure IMDS endpoint coming from the environment variable AZURE_POD_IDENTITY_AUTHORITY_HOST=${process.env.AZURE_POD_IDENTITY_AUTHORITY_HOST}.`); + } + else { + logger$j.info(`${msiName$4}: Using the default Azure IMDS endpoint ${imdsHost}.`); + } + let nextDelayInMs = imdsMsiRetryConfig.startDelayInMs; + for (let retries = 0; retries < imdsMsiRetryConfig.maxRetries; retries++) { + try { + const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$3(scopes, clientId, resourceId)), { allowInsecureConnection: true })); + const tokenResponse = await identityClient.sendTokenRequest(request); + return (tokenResponse && tokenResponse.accessToken) || null; + } + catch (error) { + if (error.statusCode === 404) { + await coreUtil.delay(nextDelayInMs); + nextDelayInMs *= imdsMsiRetryConfig.intervalIncrement; + continue; + } + throw error; + } + } + throw new AuthenticationError(404, `${msiName$4}: Failed to retrieve IMDS token after ${imdsMsiRetryConfig.maxRetries} retries.`); + }, +}; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const msiName$3 = "ManagedIdentityCredential - Azure Arc MSI"; +const logger$i = credentialLogger(msiName$3); +/** + * Generates the options used on the request for an access token. 
+ */ +function prepareRequestOptions$2(scopes, clientId, resourceId) { + const resource = mapScopesToResource(scopes); + if (!resource) { + throw new Error(`${msiName$3}: Multiple scopes are not supported.`); } - /** - * Read all containers. - * @param options - Use to set options like response page size, continuation tokens, etc. - * @returns {@link QueryIterator} Allows you to return all containers in an array or iterate over them one at a time. - * @example Read all containers to array. - * ```typescript - * const {body: containerList} = await client.database("").containers.readAll().fetchAll(); - * ``` - */ - readAll(options) { - return this.query(undefined, options); + const queryParameters = { + resource, + "api-version": azureArcAPIVersion, + }; + if (clientId) { + queryParameters.client_id = clientId; } -} - -class PermissionResponse extends ResourceResponse { - constructor(resource, headers, statusCode, permission, diagnostics) { - super(resource, headers, statusCode, diagnostics); - this.permission = permission; + if (resourceId) { + queryParameters.msi_res_id = resourceId; + } + // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. + if (!process.env.IDENTITY_ENDPOINT) { + throw new Error(`${msiName$3}: Missing environment variable: IDENTITY_ENDPOINT`); } + const query = new URLSearchParams(queryParameters); + return coreRestPipeline.createPipelineRequest({ + // Should be similar to: http://localhost:40342/metadata/identity/oauth2/token + url: `${process.env.IDENTITY_ENDPOINT}?${query.toString()}`, + method: "GET", + headers: coreRestPipeline.createHttpHeaders({ + Accept: "application/json", + Metadata: "true", + }), + }); } - /** - * Use to read, replace, or delete a given {@link Permission} by id. - * - * @see {@link Permissions} to create, upsert, query, or read all Permissions. + * Retrieves the file contents at the given path using promises. + * Useful since `fs`'s readFileSync locks the thread, and to avoid extra dependencies. */ -class Permission { - /** - * Returns a reference URL to the resource. Used for linking in Permissions. - */ - get url() { - return createPermissionUri(this.user.database.id, this.user.id, this.id); - } - /** - * @hidden - * @param user - The parent {@link User}. - * @param id - The id of the given {@link Permission}. - */ - constructor(user, id, clientContext) { - this.user = user; - this.id = id; - this.clientContext = clientContext; - } - /** - * Read the {@link PermissionDefinition} of the given {@link Permission}. - */ - async read(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.read({ - path, - resourceType: exports.ResourceType.permission, - resourceId: id, - options, - diagnosticNode, - }); - return new PermissionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); +function readFileAsync$1(path, options) { + return new Promise((resolve, reject) => fs.readFile(path, options, (err, data) => { + if (err) { + reject(err); + } + resolve(data); + })); +} +/** + * Does a request to the authentication provider that results in a file path. 
+ */ +async function filePathRequest(identityClient, requestPrepareOptions) { + const response = await identityClient.sendRequest(coreRestPipeline.createPipelineRequest(requestPrepareOptions)); + if (response.status !== 401) { + let message = ""; + if (response.bodyAsText) { + message = ` Response: ${response.bodyAsText}`; + } + throw new AuthenticationError(response.status, `${msiName$3}: To authenticate with Azure Arc MSI, status code 401 is expected on the first request. ${message}`); } - /** - * Replace the given {@link Permission} with the specified {@link PermissionDefinition}. - * @param body - The specified {@link PermissionDefinition}. - */ - async replace(body, options) { - return withDiagnostics(async (diagnosticNode) => { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.replace({ - body, - path, - resourceType: exports.ResourceType.permission, - resourceId: id, - options, - diagnosticNode, - }); - return new PermissionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + const authHeader = response.headers.get("www-authenticate") || ""; + try { + return authHeader.split("=").slice(1)[0]; } - /** - * Delete the given {@link Permission}. - */ - async delete(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.permission, - resourceId: id, - options, - diagnosticNode, - }); - return new PermissionResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + catch (e) { + throw Error(`Invalid www-authenticate header format: ${authHeader}`); } } +/** + * Defines how to determine whether the Azure Arc MSI is available, and also how to retrieve a token from the Azure Arc MSI. + */ +const arcMsi = { + name: "arc", + async isAvailable({ scopes }) { + const resource = mapScopesToResource(scopes); + if (!resource) { + logger$i.info(`${msiName$3}: Unavailable. Multiple scopes are not supported.`); + return false; + } + const result = Boolean(process.env.IMDS_ENDPOINT && process.env.IDENTITY_ENDPOINT); + if (!result) { + logger$i.info(`${msiName$3}: The environment variables needed are: IMDS_ENDPOINT and IDENTITY_ENDPOINT`); + } + return result; + }, + async getToken(configuration, getTokenOptions = {}) { + var _a; + const { identityClient, scopes, clientId, resourceId } = configuration; + if (clientId) { + logger$i.warning(`${msiName$3}: user-assigned identities not supported. The argument clientId might be ignored by the service.`); + } + if (resourceId) { + logger$i.warning(`${msiName$3}: user defined managed Identity by resource Id is not supported. 
Argument resourceId will be ignored.`); + } + logger$i.info(`${msiName$3}: Authenticating.`); + const requestOptions = Object.assign(Object.assign({ disableJsonStringifyOnBody: true, deserializationMapper: undefined, abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$2(scopes, clientId, resourceId)), { allowInsecureConnection: true }); + const filePath = await filePathRequest(identityClient, requestOptions); + if (!filePath) { + throw new Error(`${msiName$3}: Failed to find the token file.`); + } + const key = await readFileAsync$1(filePath, { encoding: "utf-8" }); + (_a = requestOptions.headers) === null || _a === void 0 ? void 0 : _a.set("Authorization", `Basic ${key}`); + const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({}, requestOptions), { + // Generally, MSI endpoints use the HTTP protocol, without transport layer security (TLS). + allowInsecureConnection: true })); + const tokenResponse = await identityClient.sendTokenRequest(request); + return (tokenResponse && tokenResponse.accessToken) || null; + }, +}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Use to create, replace, query, and read all Permissions. - * - * @see {@link Permission} to read, replace, or delete a specific permission by id. + * MSAL client assertion client. Calls to MSAL's confidential application's `acquireTokenByClientCredential` during `doGetToken`. + * @internal */ -class Permissions { - /** - * @hidden - * @param user - The parent {@link User}. - */ - constructor(user, clientContext) { - this.user = user; - this.clientContext = clientContext; +class MsalClientAssertion extends MsalNode { + constructor(options) { + super(options); + this.requiresConfidential = true; + this.getAssertion = options.getAssertion; } - query(query, options) { - const path = getPathFromLink(this.user.url, exports.ResourceType.permission); - const id = getIdFromLink(this.user.url); - return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { - return this.clientContext.queryFeed({ - path, - resourceType: exports.ResourceType.permission, - resourceId: id, - resultFn: (result) => result.Permissions, - query, - options: innerOptions, - diagnosticNode, + async doGetToken(scopes, options = {}) { + try { + const assertion = await this.getAssertion(); + const result = await this.getApp("confidential", options.enableCae).acquireTokenByClientCredential({ + scopes, + correlationId: options.correlationId, + azureRegion: this.azureRegion, + authority: options.authority, + claims: options.claims, + clientAssertion: assertion, }); - }); - } - /** - * Read all permissions. - * @example Read all permissions to array. - * ```typescript - * const {body: permissionList} = await user.permissions.readAll().fetchAll(); - * ``` - */ - readAll(options) { - return this.query(undefined, options); + // The Client Credential flow does not return an account, + // so each time getToken gets called, we will have to acquire a new token through the service. + return this.handleResult(scopes, this.clientId, result || undefined); + } + catch (err) { + let err2 = err; + if (err === null || err === undefined) { + err2 = new Error(JSON.stringify(err)); + } + else { + err2 = coreUtil.isError(err) ? err : new Error(String(err)); + } + throw this.handleError(scopes, err2, options); + } } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+const logger$h = credentialLogger("ClientAssertionCredential"); +/** + * Authenticates a service principal with a JWT assertion. + */ +class ClientAssertionCredential { /** - * Create a permission. + * Creates an instance of the ClientAssertionCredential with the details + * needed to authenticate against Microsoft Entra ID with a client + * assertion provided by the developer through the `getAssertion` function parameter. * - * A permission represents a per-User Permission to access a specific resource - * e.g. Item or Container. - * @param body - Represents the body of the permission. + * @param tenantId - The Microsoft Entra tenant (directory) ID. + * @param clientId - The client (application) ID of an App Registration in the tenant. + * @param getAssertion - A function that retrieves the assertion for the credential to use. + * @param options - Options for configuring the client which makes the authentication request. */ - async create(body, options) { - return withDiagnostics(async (diagnosticNode) => { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.user.url, exports.ResourceType.permission); - const id = getIdFromLink(this.user.url); - const response = await this.clientContext.create({ - body, - path, - resourceType: exports.ResourceType.permission, - resourceId: id, - diagnosticNode, - options, - }); - const ref = new Permission(this.user, response.result.id, this.clientContext); - return new PermissionResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); - }, this.clientContext); + constructor(tenantId, clientId, getAssertion, options = {}) { + if (!tenantId || !clientId || !getAssertion) { + throw new Error("ClientAssertionCredential: tenantId, clientId, and clientAssertion are required parameters."); + } + this.tenantId = tenantId; + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + this.clientId = clientId; + this.options = options; + this.msalFlow = new MsalClientAssertion(Object.assign(Object.assign({}, options), { logger: logger$h, clientId: this.clientId, tenantId: this.tenantId, tokenCredentialOptions: this.options, getAssertion })); } /** - * Upsert a permission. + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. * - * A permission represents a per-User Permission to access a - * specific resource e.g. Item or Container. + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. 
*/ - async upsert(body, options) { - return withDiagnostics(async (diagnosticNode) => { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.user.url, exports.ResourceType.permission); - const id = getIdFromLink(this.user.url); - const response = await this.clientContext.upsert({ - body, - path, - resourceType: exports.ResourceType.permission, - resourceId: id, - options, - diagnosticNode, - }); - const ref = new Permission(this.user, response.result.id, this.clientContext); - return new PermissionResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } -} - -class UserResponse extends ResourceResponse { - constructor(resource, headers, statusCode, user, diagnostics) { - super(resource, headers, statusCode, diagnostics); - this.user = user; + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { + newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$h); + const arrayScopes = Array.isArray(scopes) ? scopes : [scopes]; + return this.msalFlow.getToken(arrayScopes, newOptions); + }); } } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const credentialName$3 = "WorkloadIdentityCredential"; /** - * Used to read, replace, and delete Users. - * - * Additionally, you can access the permissions for a given user via `user.permission` and `user.permissions`. + * Contains the list of all supported environment variable names so that an + * appropriate error message can be generated when no credentials can be + * configured. * - * @see {@link Users} to create, upsert, query, or read all. + * @internal */ -class User { - /** - * Returns a reference URL to the resource. Used for linking in Permissions. - */ - get url() { - return createUserUri(this.database.id, this.id); - } - /** - * @hidden - * @param database - The parent {@link Database}. - */ - constructor(database, id, clientContext) { - this.database = database; - this.id = id; - this.clientContext = clientContext; - this.permissions = new Permissions(this, this.clientContext); - } +const SupportedWorkloadEnvironmentVariables = [ + "AZURE_TENANT_ID", + "AZURE_CLIENT_ID", + "AZURE_FEDERATED_TOKEN_FILE", +]; +const logger$g = credentialLogger(credentialName$3); +/** + * Workload Identity authentication is a feature in Azure that allows applications running on virtual machines (VMs) + * to access other Azure resources without the need for a service principal or managed identity. With Workload Identity + * authentication, applications authenticate themselves using their own identity, rather than using a shared service + * principal or managed identity. Under the hood, Workload Identity authentication uses the concept of Service Account + * Credentials (SACs), which are automatically created by Azure and stored securely in the VM. By using Workload + * Identity authentication, you can avoid the need to manage and rotate service principals or managed identities for + * each application on each VM. Additionally, because SACs are created automatically and managed by Azure, you don't + * need to worry about storing and securing sensitive credentials themselves. + * The WorkloadIdentityCredential supports Microsoft Entra Workload ID authentication on Azure Kubernetes and acquires + * a token using the SACs available in the Azure Kubernetes environment. 
+ * Refer to Microsoft Entra + * Workload ID for more information. + */ +class WorkloadIdentityCredential { /** - * Operations to read, replace, or delete a specific Permission by id. + * WorkloadIdentityCredential supports Microsoft Entra Workload ID on Kubernetes. * - * See `client.permissions` for creating, upserting, querying, or reading all operations. + * @param options - The identity client options to use for authentication. */ - permission(id) { - return new Permission(this, id, this.clientContext); + constructor(options) { + this.azureFederatedTokenFileContent = undefined; + this.cacheDate = undefined; + // Logging environment variables for error details + const assignedEnv = processEnvVars(SupportedWorkloadEnvironmentVariables).assigned.join(", "); + logger$g.info(`Found the following environment variables: ${assignedEnv}`); + const workloadIdentityCredentialOptions = options !== null && options !== void 0 ? options : {}; + const tenantId = workloadIdentityCredentialOptions.tenantId || process.env.AZURE_TENANT_ID; + const clientId = workloadIdentityCredentialOptions.clientId || process.env.AZURE_CLIENT_ID; + this.federatedTokenFilePath = + workloadIdentityCredentialOptions.tokenFilePath || process.env.AZURE_FEDERATED_TOKEN_FILE; + if (tenantId) { + checkTenantId(logger$g, tenantId); + } + if (clientId && tenantId && this.federatedTokenFilePath) { + logger$g.info(`Invoking ClientAssertionCredential with tenant ID: ${tenantId}, clientId: ${workloadIdentityCredentialOptions.clientId} and federated token path: [REDACTED]`); + this.client = new ClientAssertionCredential(tenantId, clientId, this.readFileContents.bind(this), options); + } } /** - * Read the {@link UserDefinition} for the given {@link User}. + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. */ - async read(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.read({ - path, - resourceType: exports.ResourceType.user, - resourceId: id, - options, - diagnosticNode, - }); - return new UserResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + async getToken(scopes, options) { + if (!this.client) { + const errorMessage = `${credentialName$3}: is unavailable. tenantId, clientId, and federatedTokenFilePath are required parameters. + In DefaultAzureCredential and ManagedIdentityCredential, these can be provided as environment variables - + "AZURE_TENANT_ID", + "AZURE_CLIENT_ID", + "AZURE_FEDERATED_TOKEN_FILE". See the troubleshooting guide for more information: https://aka.ms/azsdk/js/identity/workloadidentitycredential/troubleshoot `; + logger$g.info(errorMessage); + throw new CredentialUnavailableError(errorMessage); + } + logger$g.info("Invoking getToken() of Client Assertion Credential"); + return this.client.getToken(scopes, options); } - /** - * Replace the given {@link User}'s definition with the specified {@link UserDefinition}. - * @param body - The specified {@link UserDefinition} to replace the definition. 
- */ - async replace(body, options) { - return withDiagnostics(async (diagnosticNode) => { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; + async readFileContents() { + // Cached assertions expire after 5 minutes + if (this.cacheDate !== undefined && Date.now() - this.cacheDate >= 1000 * 60 * 5) { + this.azureFederatedTokenFileContent = undefined; + } + if (!this.federatedTokenFilePath) { + throw new CredentialUnavailableError(`${credentialName$3}: is unavailable. Invalid file path provided ${this.federatedTokenFilePath}.`); + } + if (!this.azureFederatedTokenFileContent) { + const file = await promises.readFile(this.federatedTokenFilePath, "utf8"); + const value = file.trim(); + if (!value) { + throw new CredentialUnavailableError(`${credentialName$3}: is unavailable. No content on the file ${this.federatedTokenFilePath}.`); } - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.replace({ - body, - path, - resourceType: exports.ResourceType.user, - resourceId: id, - options, - diagnosticNode, - }); - return new UserResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); - } - /** - * Delete the given {@link User}. - */ - async delete(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.user, - resourceId: id, - options, - diagnosticNode, - }); - return new UserResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + else { + this.azureFederatedTokenFileContent = value; + this.cacheDate = Date.now(); + } + } + return this.azureFederatedTokenFileContent; } } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const msiName$2 = "ManagedIdentityCredential - Token Exchange"; +const logger$f = credentialLogger(msiName$2); /** - * Used to create, upsert, query, and read all users. - * - * @see {@link User} to read, replace, or delete a specific User by id. + * Defines how to determine whether the token exchange MSI is available, and also how to retrieve a token from the token exchange MSI. */ -class Users { - /** - * @hidden - * @param database - The parent {@link Database}. - */ - constructor(database, clientContext) { - this.database = database; - this.clientContext = clientContext; +function tokenExchangeMsi() { + return { + name: "tokenExchangeMsi", + async isAvailable({ clientId }) { + const env = process.env; + const result = Boolean((clientId || env.AZURE_CLIENT_ID) && + env.AZURE_TENANT_ID && + process.env.AZURE_FEDERATED_TOKEN_FILE); + if (!result) { + logger$f.info(`${msiName$2}: Unavailable. 
The environment variables needed are: AZURE_CLIENT_ID (or the client ID sent through the parameters), AZURE_TENANT_ID and AZURE_FEDERATED_TOKEN_FILE`); + } + return result; + }, + async getToken(configuration, getTokenOptions = {}) { + const { scopes, clientId } = configuration; + const identityClientTokenCredentialOptions = {}; + const workloadIdentityCredential = new WorkloadIdentityCredential(Object.assign(Object.assign({ clientId, tenantId: process.env.AZURE_TENANT_ID, tokenFilePath: process.env.AZURE_FEDERATED_TOKEN_FILE }, identityClientTokenCredentialOptions), { disableInstanceDiscovery: true })); + const token = await workloadIdentityCredential.getToken(scopes, getTokenOptions); + return token; + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// This MSI can be easily tested by deploying a container to Azure Service Fabric with the Dockerfile: +// +// FROM node:12 +// RUN wget https://host.any/path/bash.sh +// CMD ["bash", "bash.sh"] +// +// Where the bash script contains: +// +// curl --insecure $IDENTITY_ENDPOINT'?api-version=2019-07-01-preview&resource=https://vault.azure.net/' -H "Secret: $IDENTITY_HEADER" +// +const msiName$1 = "ManagedIdentityCredential - Fabric MSI"; +const logger$e = credentialLogger(msiName$1); +/** + * Generates the options used on the request for an access token. + */ +function prepareRequestOptions$1(scopes, clientId, resourceId) { + const resource = mapScopesToResource(scopes); + if (!resource) { + throw new Error(`${msiName$1}: Multiple scopes are not supported.`); } - query(query, options) { - const path = getPathFromLink(this.database.url, exports.ResourceType.user); - const id = getIdFromLink(this.database.url); - return new QueryIterator(this.clientContext, query, options, (diagnosticNode, innerOptions) => { - return this.clientContext.queryFeed({ - path, - resourceType: exports.ResourceType.user, - resourceId: id, - resultFn: (result) => result.Users, - query, - options: innerOptions, - diagnosticNode, - }); - }); + const queryParameters = { + resource, + "api-version": azureFabricVersion, + }; + if (clientId) { + queryParameters.client_id = clientId; } - /** - * Read all users.- - * @example Read all users to array. - * ```typescript - * const {body: usersList} = await database.users.readAll().fetchAll(); - * ``` - */ - readAll(options) { - return this.query(undefined, options); + if (resourceId) { + queryParameters.msi_res_id = resourceId; } - /** - * Create a database user with the specified {@link UserDefinition}. - * @param body - The specified {@link UserDefinition}. - */ - async create(body, options) { - return withDiagnostics(async (diagnosticNode) => { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.database.url, exports.ResourceType.user); - const id = getIdFromLink(this.database.url); - const response = await this.clientContext.create({ - body, - path, - resourceType: exports.ResourceType.user, - resourceId: id, - options, - diagnosticNode, - }); - const ref = new User(this.database, response.result.id, this.clientContext); - return new UserResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); - }, this.clientContext); + const query = new URLSearchParams(queryParameters); + // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. 
+ if (!process.env.IDENTITY_ENDPOINT) { + throw new Error("Missing environment variable: IDENTITY_ENDPOINT"); } - /** - * Upsert a database user with a specified {@link UserDefinition}. - * @param body - The specified {@link UserDefinition}. - */ - async upsert(body, options) { - return withDiagnostics(async (diagnosticNode) => { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; - } - const path = getPathFromLink(this.database.url, exports.ResourceType.user); - const id = getIdFromLink(this.database.url); - const response = await this.clientContext.upsert({ - body, - path, - resourceType: exports.ResourceType.user, - resourceId: id, - options, - diagnosticNode, - }); - const ref = new User(this.database, response.result.id, this.clientContext); - return new UserResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); - }, this.clientContext); + if (!process.env.IDENTITY_HEADER) { + throw new Error("Missing environment variable: IDENTITY_HEADER"); } + return { + url: `${process.env.IDENTITY_ENDPOINT}?${query.toString()}`, + method: "GET", + headers: coreRestPipeline.createHttpHeaders({ + Accept: "application/json", + secret: process.env.IDENTITY_HEADER, + }), + }; } +/** + * Defines how to determine whether the Azure Service Fabric MSI is available, and also how to retrieve a token from the Azure Service Fabric MSI. + */ +const fabricMsi = { + name: "fabricMsi", + async isAvailable({ scopes }) { + const resource = mapScopesToResource(scopes); + if (!resource) { + logger$e.info(`${msiName$1}: Unavailable. Multiple scopes are not supported.`); + return false; + } + const env = process.env; + const result = Boolean(env.IDENTITY_ENDPOINT && env.IDENTITY_HEADER && env.IDENTITY_SERVER_THUMBPRINT); + if (!result) { + logger$e.info(`${msiName$1}: Unavailable. The environment variables needed are: IDENTITY_ENDPOINT, IDENTITY_HEADER and IDENTITY_SERVER_THUMBPRINT`); + } + return result; + }, + async getToken(configuration, getTokenOptions = {}) { + const { scopes, identityClient, clientId, resourceId } = configuration; + if (resourceId) { + logger$e.warning(`${msiName$1}: user defined managed Identity by resource Id is not supported. Argument resourceId might be ignored by the service.`); + } + logger$e.info([ + `${msiName$1}:`, + "Using the endpoint and the secret coming from the environment variables:", + `IDENTITY_ENDPOINT=${process.env.IDENTITY_ENDPOINT},`, + "IDENTITY_HEADER=[REDACTED] and", + "IDENTITY_SERVER_THUMBPRINT=[REDACTED].", + ].join(" ")); + const request = coreRestPipeline.createPipelineRequest(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$1(scopes, clientId, resourceId))); + request.agent = new https.Agent({ + // This is necessary because Service Fabric provides a self-signed certificate. + // The alternative path is to verify the certificate using the IDENTITY_SERVER_THUMBPRINT env variable. + rejectUnauthorized: false, + }); + const tokenResponse = await identityClient.sendTokenRequest(request); + return (tokenResponse && tokenResponse.accessToken) || null; + }, +}; -/** Response object for Database operations */ -class DatabaseResponse extends ResourceResponse { - constructor(resource, headers, statusCode, database, diagnostics) { - super(resource, headers, statusCode, diagnostics); - this.database = database; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+const msiName = "ManagedIdentityCredential - AppServiceMSI 2019"; +const logger$d = credentialLogger(msiName); +/** + * Generates the options used on the request for an access token. + */ +function prepareRequestOptions(scopes, clientId, resourceId) { + const resource = mapScopesToResource(scopes); + if (!resource) { + throw new Error(`${msiName}: Multiple scopes are not supported.`); + } + const queryParameters = { + resource, + "api-version": "2019-08-01", + }; + if (clientId) { + queryParameters.client_id = clientId; + } + if (resourceId) { + queryParameters.mi_res_id = resourceId; + } + const query = new URLSearchParams(queryParameters); + // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. + if (!process.env.IDENTITY_ENDPOINT) { + throw new Error(`${msiName}: Missing environment variable: IDENTITY_ENDPOINT`); + } + if (!process.env.IDENTITY_HEADER) { + throw new Error(`${msiName}: Missing environment variable: IDENTITY_HEADER`); } + return { + url: `${process.env.IDENTITY_ENDPOINT}?${query.toString()}`, + method: "GET", + headers: coreRestPipeline.createHttpHeaders({ + Accept: "application/json", + "X-IDENTITY-HEADER": process.env.IDENTITY_HEADER, + }), + }; } +/** + * Defines how to determine whether the Azure App Service MSI is available, and also how to retrieve a token from the Azure App Service MSI. + */ +const appServiceMsi2019 = { + name: "appServiceMsi2019", + async isAvailable({ scopes }) { + const resource = mapScopesToResource(scopes); + if (!resource) { + logger$d.info(`${msiName}: Unavailable. Multiple scopes are not supported.`); + return false; + } + const env = process.env; + const result = Boolean(env.IDENTITY_ENDPOINT && env.IDENTITY_HEADER); + if (!result) { + logger$d.info(`${msiName}: Unavailable. The environment variables needed are: IDENTITY_ENDPOINT and IDENTITY_HEADER.`); + } + return result; + }, + async getToken(configuration, getTokenOptions = {}) { + const { identityClient, scopes, clientId, resourceId } = configuration; + logger$d.info(`${msiName}: Using the endpoint and the secret coming form the environment variables: IDENTITY_ENDPOINT=${process.env.IDENTITY_ENDPOINT} and IDENTITY_HEADER=[REDACTED].`); + const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions(scopes, clientId, resourceId)), { + // Generally, MSI endpoints use the HTTP protocol, without transport layer security (TLS). + allowInsecureConnection: true })); + const tokenResponse = await identityClient.sendTokenRequest(request); + return (tokenResponse && tokenResponse.accessToken) || null; + }, +}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const logger$c = credentialLogger("ManagedIdentityCredential"); /** - * Operations for reading or deleting an existing database. - * - * @see {@link Databases} for creating new databases, and reading/querying all databases; use `client.databases`. + * Attempts authentication using a managed identity available at the deployment environment. + * This authentication type works in Azure VMs, App Service instances, Azure Functions applications, + * Azure Kubernetes Services, Azure Service Fabric instances and inside of the Azure Cloud Shell. * - * Note: all these operations make calls against a fixed budget. - * You should design your system such that these calls scale sublinearly with your application. 
- * For instance, do not call `database.read()` before every single `item.read()` call, to ensure the database exists; - * do this once on application start up. + * More information about configuring managed identities can be found here: + * https://learn.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/overview */ -class Database { +class ManagedIdentityCredential { /** - * Returns a reference URL to the resource. Used for linking in Permissions. + * @internal + * @hidden */ - get url() { - return createDatabaseUri(this.id); + constructor(clientIdOrOptions, options) { + var _a; + this.isEndpointUnavailable = null; + this.isAppTokenProviderInitialized = false; + let _options; + if (typeof clientIdOrOptions === "string") { + this.clientId = clientIdOrOptions; + _options = options; + } + else { + this.clientId = clientIdOrOptions === null || clientIdOrOptions === void 0 ? void 0 : clientIdOrOptions.clientId; + _options = clientIdOrOptions; + } + this.resourceId = _options === null || _options === void 0 ? void 0 : _options.resourceId; + // For JavaScript users. + if (this.clientId && this.resourceId) { + throw new Error(`${ManagedIdentityCredential.name} - Client Id and Resource Id can't be provided at the same time.`); + } + this.identityClient = new IdentityClient(_options); + this.isAvailableIdentityClient = new IdentityClient(Object.assign(Object.assign({}, _options), { retryOptions: { + maxRetries: 0, + } })); + /** authority host validation and metadata discovery to be skipped in managed identity + * since this wasn't done previously before adding token cache support + */ + this.confidentialApp = new msalCommon.ConfidentialClientApplication({ + auth: { + authority: "https://login.microsoftonline.com/managed_identity", + clientId: (_a = this.clientId) !== null && _a !== void 0 ? 
_a : DeveloperSignOnClientId, + clientSecret: "dummy-secret", + cloudDiscoveryMetadata: '{"tenant_discovery_endpoint":"https://login.microsoftonline.com/common/v2.0/.well-known/openid-configuration","api-version":"1.1","metadata":[{"preferred_network":"login.microsoftonline.com","preferred_cache":"login.windows.net","aliases":["login.microsoftonline.com","login.windows.net","login.microsoft.com","sts.windows.net"]},{"preferred_network":"login.partner.microsoftonline.cn","preferred_cache":"login.partner.microsoftonline.cn","aliases":["login.partner.microsoftonline.cn","login.chinacloudapi.cn"]},{"preferred_network":"login.microsoftonline.de","preferred_cache":"login.microsoftonline.de","aliases":["login.microsoftonline.de"]},{"preferred_network":"login.microsoftonline.us","preferred_cache":"login.microsoftonline.us","aliases":["login.microsoftonline.us","login.usgovcloudapi.net"]},{"preferred_network":"login-us.microsoftonline.com","preferred_cache":"login-us.microsoftonline.com","aliases":["login-us.microsoftonline.com"]}]}', + authorityMetadata: '{"token_endpoint":"https://login.microsoftonline.com/common/oauth2/v2.0/token","token_endpoint_auth_methods_supported":["client_secret_post","private_key_jwt","client_secret_basic"],"jwks_uri":"https://login.microsoftonline.com/common/discovery/v2.0/keys","response_modes_supported":["query","fragment","form_post"],"subject_types_supported":["pairwise"],"id_token_signing_alg_values_supported":["RS256"],"response_types_supported":["code","id_token","code id_token","id_token token"],"scopes_supported":["openid","profile","email","offline_access"],"issuer":"https://login.microsoftonline.com/{tenantid}/v2.0","request_uri_parameter_supported":false,"userinfo_endpoint":"https://graph.microsoft.com/oidc/userinfo","authorization_endpoint":"https://login.microsoftonline.com/common/oauth2/v2.0/authorize","device_authorization_endpoint":"https://login.microsoftonline.com/common/oauth2/v2.0/devicecode","http_logout_supported":true,"frontchannel_logout_supported":true,"end_session_endpoint":"https://login.microsoftonline.com/common/oauth2/v2.0/logout","claims_supported":["sub","iss","cloud_instance_name","cloud_instance_host_name","cloud_graph_host_name","msgraph_host","aud","exp","iat","auth_time","acr","nonce","preferred_username","name","tid","ver","at_hash","c_hash","email"],"kerberos_endpoint":"https://login.microsoftonline.com/common/kerberos","tenant_region_scope":null,"cloud_instance_name":"microsoftonline.com","cloud_graph_host_name":"graph.windows.net","msgraph_host":"graph.microsoft.com","rbac_url":"https://pas.windows.net"}', + clientCapabilities: [], + }, + system: { + loggerOptions: { + logLevel: getMSALLogLevel(logger$o.getLogLevel()), + }, + }, + }); } - /** Returns a new {@link Database} instance. - * - * Note: the intention is to get this object from {@link CosmosClient} via `client.database(id)`, not to instantiate it yourself. 
- */ - constructor(client, id, clientContext) { - this.client = client; - this.id = id; - this.clientContext = clientContext; - this.containers = new Containers(this, this.clientContext); - this.users = new Users(this, this.clientContext); + async cachedAvailableMSI(scopes, getTokenOptions) { + if (this.cachedMSI) { + return this.cachedMSI; + } + const MSIs = [ + arcMsi, + fabricMsi, + appServiceMsi2019, + appServiceMsi2017, + cloudShellMsi, + tokenExchangeMsi(), + imdsMsi, + ]; + for (const msi of MSIs) { + if (await msi.isAvailable({ + scopes, + identityClient: this.isAvailableIdentityClient, + clientId: this.clientId, + resourceId: this.resourceId, + getTokenOptions, + })) { + this.cachedMSI = msi; + return msi; + } + } + throw new CredentialUnavailableError(`${ManagedIdentityCredential.name} - No MSI credential available`); + } + async authenticateManagedIdentity(scopes, getTokenOptions) { + const { span, updatedOptions } = tracingClient.startSpan(`${ManagedIdentityCredential.name}.authenticateManagedIdentity`, getTokenOptions); + try { + // Determining the available MSI, and avoiding checking for other MSIs while the program is running. + const availableMSI = await this.cachedAvailableMSI(scopes, updatedOptions); + return availableMSI.getToken({ + identityClient: this.identityClient, + scopes, + clientId: this.clientId, + resourceId: this.resourceId, + }, updatedOptions); + } + catch (err) { + span.setStatus({ + status: "error", + error: err, + }); + throw err; + } + finally { + span.end(); + } } /** - * Used to read, replace, or delete a specific, existing {@link Database} by id. - * - * Use `.containers` creating new containers, or querying/reading all containers. + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * If an unexpected error occurs, an {@link AuthenticationError} will be thrown with the details of the failure. * - * @example Delete a container - * ```typescript - * await client.database("").container("").delete(); - * ``` + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. */ - container(id) { - return new Container(this, id, this.clientContext); + async getToken(scopes, options) { + let result = null; + const { span, updatedOptions } = tracingClient.startSpan(`${ManagedIdentityCredential.name}.getToken`, options); + try { + // isEndpointAvailable can be true, false, or null, + // If it's null, it means we don't yet know whether + // the endpoint is available and need to check for it. + if (this.isEndpointUnavailable !== true) { + const availableMSI = await this.cachedAvailableMSI(scopes, updatedOptions); + if (availableMSI.name === "tokenExchangeMsi") { + result = await this.authenticateManagedIdentity(scopes, updatedOptions); + } + else { + const appTokenParameters = { + correlationId: this.identityClient.getCorrelationId(), + tenantId: (options === null || options === void 0 ? void 0 : options.tenantId) || "managed_identity", + scopes: Array.isArray(scopes) ? scopes : [scopes], + claims: options === null || options === void 0 ? void 0 : options.claims, + }; + // Added a check to see if SetAppTokenProvider was already defined. 
+ this.initializeSetAppTokenProvider(); + const authenticationResult = await this.confidentialApp.acquireTokenByClientCredential(Object.assign({}, appTokenParameters)); + result = this.handleResult(scopes, authenticationResult || undefined); + } + if (result === null) { + // If authenticateManagedIdentity returns null, + // it means no MSI endpoints are available. + // If so, we avoid trying to reach to them in future requests. + this.isEndpointUnavailable = true; + // It also means that the endpoint answered with either 200 or 201 (see the sendTokenRequest method), + // yet we had no access token. For this reason, we'll throw once with a specific message: + const error = new CredentialUnavailableError("The managed identity endpoint was reached, yet no tokens were received."); + logger$c.getToken.info(formatError(scopes, error)); + throw error; + } + // Since `authenticateManagedIdentity` didn't throw, and the result was not null, + // We will assume that this endpoint is reachable from this point forward, + // and avoid pinging again to it. + this.isEndpointUnavailable = false; + } + else { + // We've previously determined that the endpoint was unavailable, + // either because it was unreachable or permanently unable to authenticate. + const error = new CredentialUnavailableError("The managed identity endpoint is not currently available"); + logger$c.getToken.info(formatError(scopes, error)); + throw error; + } + logger$c.getToken.info(formatSuccess(scopes)); + return result; + } + catch (err) { + // CredentialUnavailable errors are expected to reach here. + // We intend them to bubble up, so that DefaultAzureCredential can catch them. + if (err.name === "AuthenticationRequiredError") { + throw err; + } + // Expected errors to reach this point: + // - Errors coming from a method unexpectedly breaking. + // - When identityClient.sendTokenRequest throws, in which case + // if the status code was 400, it means that the endpoint is working, + // but no identity is available. + span.setStatus({ + status: "error", + error: err, + }); + // If either the network is unreachable, + // we can safely assume the credential is unavailable. + if (err.code === "ENETUNREACH") { + const error = new CredentialUnavailableError(`${ManagedIdentityCredential.name}: Unavailable. Network unreachable. Message: ${err.message}`); + logger$c.getToken.info(formatError(scopes, error)); + throw error; + } + // If either the host was unreachable, + // we can safely assume the credential is unavailable. + if (err.code === "EHOSTUNREACH") { + const error = new CredentialUnavailableError(`${ManagedIdentityCredential.name}: Unavailable. No managed identity endpoint found. Message: ${err.message}`); + logger$c.getToken.info(formatError(scopes, error)); + throw error; + } + // If err.statusCode has a value of 400, it comes from sendTokenRequest, + // and it means that the endpoint is working, but that no identity is available. + if (err.statusCode === 400) { + throw new CredentialUnavailableError(`${ManagedIdentityCredential.name}: The managed identity endpoint is indicating there's no available identity. Message: ${err.message}`); + } + // This is a special case for Docker Desktop which responds with a 403 with a message that contains "A socket operation was attempted to an unreachable network" + // rather than just timing out, as expected. 
+ if (err.statusCode === 403 || err.code === 403) { + if (err.message.includes("A socket operation was attempted to an unreachable network")) { + const error = new CredentialUnavailableError(`${ManagedIdentityCredential.name}: Unavailable. Network unreachable. Message: ${err.message}`); + logger$c.getToken.info(formatError(scopes, error)); + throw error; + } + } + // If the error has no status code, we can assume there was no available identity. + // This will throw silently during any ChainedTokenCredential. + if (err.statusCode === undefined) { + throw new CredentialUnavailableError(`${ManagedIdentityCredential.name}: Authentication failed. Message ${err.message}`); + } + // Any other error should break the chain. + throw new AuthenticationError(err.statusCode, { + error: `${ManagedIdentityCredential.name} authentication failed.`, + error_description: err.message, + }); + } + finally { + // Finally is always called, both if we return and if we throw in the above try/catch. + span.end(); + } } /** - * Used to read, replace, or delete a specific, existing {@link User} by id. - * - * Use `.users` for creating new users, or querying/reading all users. + * Handles the MSAL authentication result. + * If the result has an account, we update the local account reference. + * If the token received is invalid, an error will be thrown depending on what's missing. */ - user(id) { - return new User(this, id, this.clientContext); - } - /** Read the definition of the given Database. */ - async read(options) { - return withDiagnostics(async (diagnosticNode) => { - return this.readInternal(diagnosticNode, options); - }, this.clientContext); + handleResult(scopes, result, getTokenOptions) { + this.ensureValidMsalToken(scopes, result, getTokenOptions); + logger$c.getToken.info(formatSuccess(scopes)); + return { + token: result.accessToken, + expiresOnTimestamp: result.expiresOn.getTime(), + }; } /** - * @hidden + * Ensures the validity of the MSAL token + * @internal */ - async readInternal(diagnosticNode, options) { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.read({ - path, - resourceType: exports.ResourceType.database, - resourceId: id, - options, - diagnosticNode, - }); - return new DatabaseResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - } - /** Delete the given Database. */ - async delete(options) { - return withDiagnostics(async (diagnosticNode) => { - const path = getPathFromLink(this.url); - const id = getIdFromLink(this.url); - const response = await this.clientContext.delete({ - path, - resourceType: exports.ResourceType.database, - resourceId: id, - options, - diagnosticNode, + ensureValidMsalToken(scopes, msalToken, getTokenOptions) { + const error = (message) => { + logger$c.getToken.info(message); + return new AuthenticationRequiredError({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + getTokenOptions, + message, }); - return new DatabaseResponse(response.result, response.headers, response.code, this, getEmptyCosmosDiagnostics()); - }, this.clientContext); + }; + if (!msalToken) { + throw error("No response"); + } + if (!msalToken.expiresOn) { + throw error(`Response had no "expiresOn" property.`); + } + if (!msalToken.accessToken) { + throw error(`Response had no "accessToken" property.`); + } } - /** - * Gets offer on database. If none exists, returns an OfferResponse with undefined. 
- */ - async readOffer(options = {}) { - return withDiagnostics(async (diagnosticNode) => { - const { resource: record } = await withMetadataDiagnostics(async (node) => { - return this.readInternal(node); - }, diagnosticNode, exports.MetadataLookUpType.DatabaseLookUp); - const path = "/offers"; - const url = record._self; - const response = await this.clientContext.queryFeed({ - path, - resourceId: "", - resourceType: exports.ResourceType.offer, - query: `SELECT * from root where root.resource = "${url}"`, - resultFn: (result) => result.Offers, - options, - diagnosticNode, + initializeSetAppTokenProvider() { + if (!this.isAppTokenProviderInitialized) { + this.confidentialApp.SetAppTokenProvider(async (appTokenProviderParameters) => { + logger$c.info(`SetAppTokenProvider invoked with parameters- ${JSON.stringify(appTokenProviderParameters)}`); + const getTokenOptions = Object.assign({}, appTokenProviderParameters); + logger$c.info(`authenticateManagedIdentity invoked with scopes- ${JSON.stringify(appTokenProviderParameters.scopes)} and getTokenOptions - ${JSON.stringify(getTokenOptions)}`); + const resultToken = await this.authenticateManagedIdentity(appTokenProviderParameters.scopes, getTokenOptions); + if (resultToken) { + logger$c.info(`SetAppTokenProvider will save the token in cache`); + const expiresInSeconds = (resultToken === null || resultToken === void 0 ? void 0 : resultToken.expiresOnTimestamp) + ? Math.floor((resultToken.expiresOnTimestamp - Date.now()) / 1000) + : 0; + return { + accessToken: resultToken === null || resultToken === void 0 ? void 0 : resultToken.token, + expiresInSeconds, + }; + } + else { + logger$c.info(`SetAppTokenProvider token has "no_access_token_returned" as the saved token`); + return { + accessToken: "no_access_token_returned", + expiresInSeconds: 0, + }; + } }); - const offer = response.result[0] - ? new Offer(this.client, response.result[0].id, this.clientContext) - : undefined; - return new OfferResponse(response.result[0], response.headers, response.code, getEmptyCosmosDiagnostics(), offer); - }, this.clientContext); + this.isAppTokenProviderInitialized = true; + } } } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Operations for creating new databases, and reading/querying all databases - * - * @see {@link Database} for reading or deleting an existing database; use `client.database(id)`. - * - * Note: all these operations make calls against a fixed budget. - * You should design your system such that these calls scale sublinearly with your application. - * For instance, do not call `databases.readAll()` before every single `item.read()` call, to ensure the database exists; - * do this once on application start up. + * Ensures the scopes value is an array. + * @internal */ -class Databases { - /** - * @hidden - * @param client - The parent {@link CosmosClient} for the Database. - */ - constructor(client, clientContext) { - this.client = client; - this.clientContext = clientContext; - } - query(query, options) { - const cb = (diagNode, innerOptions) => { - return this.clientContext.queryFeed({ - path: "/dbs", - resourceType: exports.ResourceType.database, - resourceId: "", - resultFn: (result) => result.Databases, - query, - options: innerOptions, - diagnosticNode: diagNode, - }); - }; - return new QueryIterator(this.clientContext, query, options, cb); +function ensureScopes(scopes) { + return Array.isArray(scopes) ? scopes : [scopes]; +} +/** + * Throws if the received scope is not valid. 
+ * @internal + */ +function ensureValidScopeForDevTimeCreds(scope, logger) { + if (!scope.match(/^[0-9a-zA-Z-_.:/]+$/)) { + const error = new Error("Invalid scope was specified by the user or calling client"); + logger.getToken.info(formatError(scope, error)); + throw error; } +} +/** + * Returns the resource out of a scope. + * @internal + */ +function getScopeResource(scope) { + return scope.replace(/\/.default$/, ""); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Mockable reference to the CLI credential cliCredentialFunctions + * @internal + */ +const cliCredentialInternals = { /** - * Send a request for creating a database. - * - * A database manages users, permissions and a set of containers. - * Each Azure Cosmos DB Database Account is able to support multiple independent named databases, - * with the database being the logical container for data. - * - * Each Database consists of one or more containers, each of which in turn contain one or more - * documents. Since databases are an administrative resource, the Service Master Key will be - * required in order to access and successfully complete any action using the User APIs. - * - * @param body - The {@link DatabaseDefinition} that represents the {@link Database} to be created. - * @param options - Use to set options like response page size, continuation tokens, etc. + * @internal */ - async create(body, options = {}) { - return withDiagnostics(async (diagnosticNode) => { - return this.createInternal(diagnosticNode, body, options); - }, this.clientContext); - } + getSafeWorkingDir() { + if (process.platform === "win32") { + if (!process.env.SystemRoot) { + throw new Error("Azure CLI credential expects a 'SystemRoot' environment variable"); + } + return process.env.SystemRoot; + } + else { + return "/bin"; + } + }, /** - * @hidden + * Gets the access token from Azure CLI + * @param resource - The resource to use when getting the token + * @internal */ - async createInternal(diagnosticNode, body, options = {}) { - const err = {}; - if (!isResourceValid(body, err)) { - throw err; + async getAzureCliAccessToken(resource, tenantId, timeout) { + let tenantSection = []; + if (tenantId) { + tenantSection = ["--tenant", tenantId]; } - validateOffer(body); - if (body.maxThroughput) { - const autoscaleParams = { - maxThroughput: body.maxThroughput, - }; - if (body.autoUpgradePolicy) { - autoscaleParams.autoUpgradePolicy = body.autoUpgradePolicy; + return new Promise((resolve, reject) => { + try { + child_process.execFile("az", [ + "account", + "get-access-token", + "--output", + "json", + "--resource", + resource, + ...tenantSection, + ], { cwd: cliCredentialInternals.getSafeWorkingDir(), shell: true, timeout }, (error, stdout, stderr) => { + resolve({ stdout: stdout, stderr: stderr, error }); + }); + } + catch (err) { + reject(err); } - const autoscaleHeaders = JSON.stringify(autoscaleParams); - options.initialHeaders = Object.assign({}, options.initialHeaders, { - [Constants$1.HttpHeaders.AutoscaleSettings]: autoscaleHeaders, - }); - delete body.maxThroughput; - delete body.autoUpgradePolicy; - } - if (body.throughput) { - options.initialHeaders = Object.assign({}, options.initialHeaders, { - [Constants$1.HttpHeaders.OfferThroughput]: body.throughput, - }); - delete body.throughput; - } - const path = "/dbs"; // TODO: constant - const response = await this.clientContext.create({ - body, - path, - resourceType: exports.ResourceType.database, - resourceId: undefined, - diagnosticNode, - options, 
}); - const ref = new Database(this.client, body.id, this.clientContext); - return new DatabaseResponse(response.result, response.headers, response.code, ref, getEmptyCosmosDiagnostics()); - } + }, +}; +const logger$b = credentialLogger("AzureCliCredential"); +/** + * This credential will use the currently logged-in user login information + * via the Azure CLI ('az') commandline tool. + * To do so, it will read the user access token and expire time + * with Azure CLI command "az account get-access-token". + */ +class AzureCliCredential { /** - * Check if a database exists, and if it doesn't, create it. - * This will make a read operation based on the id in the `body`, then if it is not found, a create operation. + * Creates an instance of the {@link AzureCliCredential}. * - * A database manages users, permissions and a set of containers. - * Each Azure Cosmos DB Database Account is able to support multiple independent named databases, - * with the database being the logical container for data. + * To use this credential, ensure that you have already logged + * in via the 'az' tool using the command "az login" from the commandline. * - * Each Database consists of one or more containers, each of which in turn contain one or more - * documents. Since databases are an an administrative resource, the Service Master Key will be - * required in order to access and successfully complete any action using the User APIs. + * @param options - Options, to optionally allow multi-tenant requests. + */ + constructor(options) { + if (options === null || options === void 0 ? void 0 : options.tenantId) { + checkTenantId(logger$b, options === null || options === void 0 ? void 0 : options.tenantId); + this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; + } + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + this.timeout = options === null || options === void 0 ? void 0 : options.processTimeoutInMs; + } + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. * - * @param body - The {@link DatabaseDefinition} that represents the {@link Database} to be created. - * @param options - Additional options for the request + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. */ - async createIfNotExists(body, options) { - if (!body || body.id === null || body.id === undefined) { - throw new Error("body parameter must be an object with an id property"); + async getToken(scopes, options = {}) { + const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds); + if (tenantId) { + checkTenantId(logger$b, tenantId); } - /* - 1. Attempt to read the Database (based on an assumption that most databases will already exist, so its faster) - 2. If it fails with NotFound error, attempt to create the db. Else, return the read results. - */ - return withDiagnostics(async (diagnosticNode) => { + const scope = typeof scopes === "string" ? 
scopes : scopes[0]; + logger$b.getToken.info(`Using the scope ${scope}`); + return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async () => { + var _a, _b, _c, _d; try { - const readResponse = await this.client - .database(body.id) - .readInternal(diagnosticNode, options); - return readResponse; - } - catch (err) { - if (err.code === StatusCodes.NotFound) { - const createResponse = await this.createInternal(diagnosticNode, body, options); - // Must merge the headers to capture RU costskaty - mergeHeaders(createResponse.headers, err.headers); - return createResponse; + ensureValidScopeForDevTimeCreds(scope, logger$b); + const resource = getScopeResource(scope); + const obj = await cliCredentialInternals.getAzureCliAccessToken(resource, tenantId, this.timeout); + const specificScope = (_a = obj.stderr) === null || _a === void 0 ? void 0 : _a.match("(.*)az login --scope(.*)"); + const isLoginError = ((_b = obj.stderr) === null || _b === void 0 ? void 0 : _b.match("(.*)az login(.*)")) && !specificScope; + const isNotInstallError = ((_c = obj.stderr) === null || _c === void 0 ? void 0 : _c.match("az:(.*)not found")) || ((_d = obj.stderr) === null || _d === void 0 ? void 0 : _d.startsWith("'az' is not recognized")); + if (isNotInstallError) { + const error = new CredentialUnavailableError("Azure CLI could not be found. Please visit https://aka.ms/azure-cli for installation instructions and then, once installed, authenticate to your Azure account using 'az login'."); + logger$b.getToken.info(formatError(scopes, error)); + throw error; } - else { - throw err; + if (isLoginError) { + const error = new CredentialUnavailableError("Please run 'az login' from a command prompt to authenticate before using this credential."); + logger$b.getToken.info(formatError(scopes, error)); + throw error; + } + try { + const responseData = obj.stdout; + const response = JSON.parse(responseData); + logger$b.getToken.info(formatSuccess(scopes)); + const returnValue = { + token: response.accessToken, + expiresOnTimestamp: new Date(response.expiresOn).getTime(), + }; + return returnValue; + } + catch (e) { + if (obj.stderr) { + throw new CredentialUnavailableError(obj.stderr); + } + throw e; } } - }, this.clientContext); - } - // TODO: DatabaseResponse for QueryIterator? - /** - * Reads all databases. - * @param options - Use to set options like response page size, continuation tokens, etc. - * @returns {@link QueryIterator} Allows you to return all databases in an array or iterate over them one at a time. - * @example Read all databases to array. - * ```typescript - * const {body: databaseList} = await client.databases.readAll().fetchAll(); - * ``` - */ - readAll(options) { - return this.query(undefined, options); + catch (err) { + const error = err.name === "CredentialUnavailableError" + ? err + : new CredentialUnavailableError(err.message || "Unknown error while trying to retrieve the access token"); + logger$b.getToken.info(formatError(scopes, error)); + throw error; + } + }); } } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Used to specify which type of events to execute this plug in on. - * - * @hidden + * Easy to mock childProcess utils. 
+ * @internal */ -exports.PluginOn = void 0; -(function (PluginOn) { - /** - * Will be executed per network request - */ - PluginOn["request"] = "request"; +const processUtils = { /** - * Will be executed per API operation + * Promisifying childProcess.execFile + * @internal */ - PluginOn["operation"] = "operation"; -})(exports.PluginOn || (exports.PluginOn = {})); + execFile(file, params, options) { + return new Promise((resolve, reject) => { + child_process__namespace.execFile(file, params, options, (error, stdout, stderr) => { + if (Buffer.isBuffer(stdout)) { + stdout = stdout.toString("utf8"); + } + if (Buffer.isBuffer(stderr)) { + stderr = stderr.toString("utf8"); + } + if (stderr || error) { + reject(stderr ? new Error(stderr) : error); + } + else { + resolve(stdout); + } + }); + }); + }, +}; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const logger$a = credentialLogger("AzurePowerShellCredential"); +const isWindows = process.platform === "win32"; /** + * Returns a platform-appropriate command name by appending ".exe" on Windows. + * * @internal */ -async function executePlugins(diagnosticNode, requestContext, next, on) { - if (!requestContext.plugins) { - return next(requestContext, diagnosticNode, undefined); - } - let level = 0; - const _ = (inner) => { - if (++level >= inner.plugins.length) { - return next(requestContext, diagnosticNode, undefined); - } - else if (inner.plugins[level].on !== on) { - return _(requestContext); - } - else { - return inner.plugins[level].plugin(inner, diagnosticNode, _); - } - }; - if (requestContext.plugins[level].on !== on) { - return _(requestContext); +function formatCommand(commandName) { + if (isWindows) { + return `${commandName}.exe`; } else { - return requestContext.plugins[level].plugin(requestContext, diagnosticNode, _); + return commandName; } } - -/** - * @hidden - */ -// Windows Socket Error Codes -const WindowsInterruptedFunctionCall = 10004; /** - * @hidden - */ -const WindowsFileHandleNotValid = 10009; -/** - * @hidden - */ -const WindowsPermissionDenied = 10013; -/** - * @hidden - */ -const WindowsBadAddress = 10014; -/** - * @hidden + * Receives a list of commands to run, executes them, then returns the outputs. + * If anything fails, an error is thrown. + * @internal */ -const WindowsInvalidArgumnet = 10022; +async function runCommands(commands, timeout) { + const results = []; + for (const command of commands) { + const [file, ...parameters] = command; + const result = (await processUtils.execFile(file, parameters, { + encoding: "utf8", + timeout, + })); + results.push(result); + } + return results; +} /** - * @hidden + * Known PowerShell errors + * @internal */ -const WindowsResourceTemporarilyUnavailable = 10035; +const powerShellErrors = { + login: "Run Connect-AzAccount to login", + installed: "The specified module 'Az.Accounts' with version '2.2.0' was not loaded because no valid module file was found in any module directory", +}; /** - * @hidden + * Messages to use when throwing in this credential. + * @internal */ -const WindowsOperationNowInProgress = 10036; +const powerShellPublicErrorMessages = { + login: "Please run 'Connect-AzAccount' from PowerShell to authenticate before using this credential.", + installed: `The 'Az.Account' module >= 2.2.0 is not installed. 
Install the Azure Az PowerShell module with: "Install-Module -Name Az -Scope CurrentUser -Repository PSGallery -Force".`, + troubleshoot: `To troubleshoot, visit https://aka.ms/azsdk/js/identity/powershellcredential/troubleshoot.`, +}; +// PowerShell Azure User not logged in error check. +const isLoginError = (err) => err.message.match(`(.*)${powerShellErrors.login}(.*)`); +// Az Module not Installed in Azure PowerShell check. +const isNotInstalledError = (err) => err.message.match(powerShellErrors.installed); /** - * @hidden + * The PowerShell commands to be tried, in order. + * + * @internal */ -const WindowsAddressAlreadyInUse = 10048; +const commandStack = [formatCommand("pwsh")]; +if (isWindows) { + commandStack.push(formatCommand("powershell")); +} /** - * @hidden + * This credential will use the currently logged-in user information from the + * Azure PowerShell module. To do so, it will read the user access token and + * expire time with Azure PowerShell command `Get-AzAccessToken -ResourceUrl {ResourceScope}` */ -const WindowsConnectionResetByPeer = 10054; -/** - * @hidden - */ -const WindowsCannotSendAfterSocketShutdown = 10058; -/** - * @hidden - */ -const WindowsConnectionTimedOut = 10060; -/** - * @hidden - */ -const WindowsConnectionRefused = 10061; -/** - * @hidden - */ -const WindowsNameTooLong = 10063; -/** - * @hidden - */ -const WindowsHostIsDown = 10064; -/** - * @hidden - */ -const WindowsNoRouteTohost = 10065; -/** - * @hidden - */ -// Linux Error Codes -/** - * @hidden - */ -const LinuxConnectionReset = "ECONNRESET"; -// Node Error Codes -/** - * @hidden - */ -const BrokenPipe = "EPIPE"; -/** - * @hidden - */ -const CONNECTION_ERROR_CODES = [ - WindowsInterruptedFunctionCall, - WindowsFileHandleNotValid, - WindowsPermissionDenied, - WindowsBadAddress, - WindowsInvalidArgumnet, - WindowsResourceTemporarilyUnavailable, - WindowsOperationNowInProgress, - WindowsAddressAlreadyInUse, - WindowsConnectionResetByPeer, - WindowsCannotSendAfterSocketShutdown, - WindowsConnectionTimedOut, - WindowsConnectionRefused, - WindowsNameTooLong, - WindowsHostIsDown, - WindowsNoRouteTohost, - LinuxConnectionReset, - TimeoutErrorCode, - BrokenPipe, -]; -/** - * @hidden - */ -function needsRetry(operationType, code) { - if ((operationType === exports.OperationType.Read || operationType === exports.OperationType.Query) && - CONNECTION_ERROR_CODES.indexOf(code) !== -1) { - return true; - } - else { - return false; - } -} -/** - * This class implements the default connection retry policy for requests. - * @hidden - */ -class DefaultRetryPolicy { - constructor(operationType) { - this.operationType = operationType; - this.maxTries = 10; - this.currentRetryAttemptCount = 0; - this.retryAfterInMs = 1000; - } +class AzurePowerShellCredential { /** - * Determines whether the request should be retried or not. - * @param err - Error returned by the request. + * Creates an instance of the {@link AzurePowerShellCredential}. + * + * To use this credential: + * - Install the Azure Az PowerShell module with: + * `Install-Module -Name Az -Scope CurrentUser -Repository PSGallery -Force`. + * - You have already logged in to Azure PowerShell using the command + * `Connect-AzAccount` from the command line. + * + * @param options - Options, to optionally allow multi-tenant requests. 
*/ - async shouldRetry(err, diagnosticNode) { - if (err) { - if (this.currentRetryAttemptCount < this.maxTries && - needsRetry(this.operationType, err.code)) { - diagnosticNode.addData({ successfulRetryPolicy: "default" }); - this.currentRetryAttemptCount++; - return true; - } + constructor(options) { + if (options === null || options === void 0 ? void 0 : options.tenantId) { + checkTenantId(logger$a, options === null || options === void 0 ? void 0 : options.tenantId); + this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; } - return false; + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + this.timeout = options === null || options === void 0 ? void 0 : options.processTimeoutInMs; } -} - -/** - * This class implements the retry policy for endpoint discovery. - * @hidden - */ -class EndpointDiscoveryRetryPolicy { /** - * @param globalEndpointManager - The GlobalEndpointManager instance. + * Gets the access token from Azure PowerShell + * @param resource - The resource to use when getting the token */ - constructor(globalEndpointManager, operationType) { - this.globalEndpointManager = globalEndpointManager; - this.operationType = operationType; - this.maxTries = EndpointDiscoveryRetryPolicy.maxTries; - this.currentRetryAttemptCount = 0; - this.retryAfterInMs = EndpointDiscoveryRetryPolicy.retryAfterInMs; + async getAzurePowerShellAccessToken(resource, tenantId, timeout) { + // Clone the stack to avoid mutating it while iterating + for (const powerShellCommand of [...commandStack]) { + try { + await runCommands([[powerShellCommand, "/?"]], timeout); + } + catch (e) { + // Remove this credential from the original stack so that we don't try it again. + commandStack.shift(); + continue; + } + let tenantSection = ""; + if (tenantId) { + tenantSection = `-TenantId "${tenantId}"`; + } + const results = await runCommands([ + [ + powerShellCommand, + "-NoProfile", + "-NonInteractive", + "-Command", + "Import-Module Az.Accounts -MinimumVersion 2.2.0 -PassThru", + ], + [ + powerShellCommand, + "-NoProfile", + "-NonInteractive", + "-Command", + `Get-AzAccessToken ${tenantSection} -ResourceUrl "${resource}" | ConvertTo-Json`, + ], + ]); + const result = results[1]; + try { + return JSON.parse(result); + } + catch (e) { + throw new Error(`Unable to parse the output of PowerShell. Received output: ${result}`); + } + } + throw new Error(`Unable to execute PowerShell. Ensure that it is installed in your system`); } /** - * Determines whether the request should be retried or not. - * @param err - Error returned by the request. + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If the authentication cannot be performed through PowerShell, a {@link CredentialUnavailableError} will be thrown. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this TokenCredential implementation might make. 
*/ - async shouldRetry(err, diagnosticNode, retryContext, locationEndpoint) { - if (!err) { - return false; - } - if (!retryContext || !locationEndpoint) { - return false; - } - if (!this.globalEndpointManager.enableEndpointDiscovery) { - return false; - } - if (this.currentRetryAttemptCount >= this.maxTries) { - return false; - } - this.currentRetryAttemptCount++; - if (isReadRequest(this.operationType)) { - await this.globalEndpointManager.markCurrentLocationUnavailableForRead(diagnosticNode, locationEndpoint); - } - else { - await this.globalEndpointManager.markCurrentLocationUnavailableForWrite(diagnosticNode, locationEndpoint); - } - retryContext.retryCount = this.currentRetryAttemptCount; - retryContext.clearSessionTokenNotAvailable = false; - retryContext.retryRequestOnPreferredLocations = false; - diagnosticNode.addData({ successfulRetryPolicy: "endpointDiscovery" }); - return true; + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async () => { + const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds); + const scope = typeof scopes === "string" ? scopes : scopes[0]; + if (tenantId) { + checkTenantId(logger$a, tenantId); + } + try { + ensureValidScopeForDevTimeCreds(scope, logger$a); + logger$a.getToken.info(`Using the scope ${scope}`); + const resource = getScopeResource(scope); + const response = await this.getAzurePowerShellAccessToken(resource, tenantId, this.timeout); + logger$a.getToken.info(formatSuccess(scopes)); + return { + token: response.Token, + expiresOnTimestamp: new Date(response.ExpiresOn).getTime(), + }; + } + catch (err) { + if (isNotInstalledError(err)) { + const error = new CredentialUnavailableError(powerShellPublicErrorMessages.installed); + logger$a.getToken.info(formatError(scope, error)); + throw error; + } + else if (isLoginError(err)) { + const error = new CredentialUnavailableError(powerShellPublicErrorMessages.login); + logger$a.getToken.info(formatError(scope, error)); + throw error; + } + const error = new CredentialUnavailableError(`${err}. ${powerShellPublicErrorMessages.troubleshoot}`); + logger$a.getToken.info(formatError(scope, error)); + throw error; + } + }); } } -EndpointDiscoveryRetryPolicy.maxTries = 120; // TODO: Constant? -EndpointDiscoveryRetryPolicy.retryAfterInMs = 1000; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * This class implements the resource throttle retry policy for requests. - * @hidden + * @internal */ -class ResourceThrottleRetryPolicy { +const logger$9 = credentialLogger("ChainedTokenCredential"); +/** + * Enables multiple `TokenCredential` implementations to be tried in order + * until one of the getToken methods returns an access token. + */ +class ChainedTokenCredential { /** - * @param maxTries - Max number of retries to be performed for a request. - * @param fixedRetryIntervalInMs - Fixed retry interval in milliseconds to wait between each - * retry ignoring the retryAfter returned as part of the response. - * @param timeoutInSeconds - Max wait time in seconds to wait for a request while the - * retries are happening. + * Creates an instance of ChainedTokenCredential using the given credentials. + * + * @param sources - `TokenCredential` implementations to be tried in order. 
+ * + * Example usage: + * ```javascript + * const firstCredential = new ClientSecretCredential(tenantId, clientId, clientSecret); + * const secondCredential = new ClientSecretCredential(tenantId, anotherClientId, anotherSecret); + * const credentialChain = new ChainedTokenCredential(firstCredential, secondCredential); + * ``` */ - constructor(maxTries = 9, fixedRetryIntervalInMs = 0, timeoutInSeconds = 30) { - this.maxTries = maxTries; - this.fixedRetryIntervalInMs = fixedRetryIntervalInMs; - /** Current retry attempt count. */ - this.currentRetryAttemptCount = 0; - /** Cummulative wait time in milliseconds for a request while the retries are happening. */ - this.cummulativeWaitTimeinMs = 0; - /** Retry interval in milliseconds to wait before the next request will be sent. */ - this.retryAfterInMs = 0; - this.timeoutInMs = timeoutInSeconds * 1000; - this.currentRetryAttemptCount = 0; - this.cummulativeWaitTimeinMs = 0; + constructor(...sources) { + this._sources = []; + this._sources = sources; } /** - * Determines whether the request should be retried or not. - * @param err - Error returned by the request. + * Returns the first access token returned by one of the chained + * `TokenCredential` implementations. Throws an {@link AggregateAuthenticationError} + * when one or more credentials throws an {@link AuthenticationError} and + * no credentials have returned an access token. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * `TokenCredential` implementation might make. */ - async shouldRetry(err, diagnosticNode) { - // TODO: any custom error object - if (err) { - if (this.currentRetryAttemptCount < this.maxTries) { - this.currentRetryAttemptCount++; - this.retryAfterInMs = 0; - if (this.fixedRetryIntervalInMs) { - this.retryAfterInMs = this.fixedRetryIntervalInMs; - } - else if (err.retryAfterInMs) { - this.retryAfterInMs = err.retryAfterInMs; + async getToken(scopes, options = {}) { + const { token } = await this.getTokenInternal(scopes, options); + return token; + } + async getTokenInternal(scopes, options = {}) { + let token = null; + let successfulCredential; + const errors = []; + return tracingClient.withSpan("ChainedTokenCredential.getToken", options, async (updatedOptions) => { + for (let i = 0; i < this._sources.length && token === null; i++) { + try { + token = await this._sources[i].getToken(scopes, updatedOptions); + successfulCredential = this._sources[i]; } - if (this.cummulativeWaitTimeinMs < this.timeoutInMs) { - this.cummulativeWaitTimeinMs += this.retryAfterInMs; - diagnosticNode.addData({ successfulRetryPolicy: "resourceThrottle" }); - return true; + catch (err) { + if (err.name === "CredentialUnavailableError" || + err.name === "AuthenticationRequiredError") { + errors.push(err); + } + else { + logger$9.getToken.info(formatError(scopes, err)); + throw err; + } } } - } - return false; + if (!token && errors.length > 0) { + const err = new AggregateAuthenticationError(errors, "ChainedTokenCredential authentication failed."); + logger$9.getToken.info(formatError(scopes, err)); + throw err; + } + logger$9.getToken.info(`Result for ${successfulCredential.constructor.name}: ${formatSuccess(scopes)}`); + if (token === null) { + throw new CredentialUnavailableError("Failed to retrieve a valid 
token"); + } + return { token, successfulCredential }; + }); } } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const readFileAsync = util.promisify(fs.readFile); /** - * This class implements the retry policy for session consistent reads. - * @hidden + * Tries to asynchronously load a certificate from the given path. + * + * @param configuration - Either the PEM value or the path to the certificate. + * @param sendCertificateChain - Option to include x5c header for SubjectName and Issuer name authorization. + * @returns - The certificate parts, or `undefined` if the certificate could not be loaded. + * @internal */ -class SessionRetryPolicy { - /** - * @param globalEndpointManager - The GlobalEndpointManager instance. - */ - constructor(globalEndpointManager, resourceType, operationType, connectionPolicy) { - this.globalEndpointManager = globalEndpointManager; - this.resourceType = resourceType; - this.operationType = operationType; - this.connectionPolicy = connectionPolicy; - /** Current retry attempt count. */ - this.currentRetryAttemptCount = 0; - /** Retry interval in milliseconds. */ - this.retryAfterInMs = 0; +async function parseCertificate(configuration, sendCertificateChain) { + const certificateParts = {}; + const certificate = configuration + .certificate; + const certificatePath = configuration + .certificatePath; + certificateParts.certificateContents = + certificate || (await readFileAsync(certificatePath, "utf8")); + if (sendCertificateChain) { + certificateParts.x5c = certificateParts.certificateContents; } - /** - * Determines whether the request should be retried or not. - * @param err - Error returned by the request. - * @param callback - The callback function which takes bool argument which specifies whether the request - * will be retried or not. - */ - async shouldRetry(err, diagnosticNode, retryContext) { - if (!err) { - return false; - } - if (!retryContext) { - return false; - } - if (!this.connectionPolicy.enableEndpointDiscovery) { - return false; + const certificatePattern = /(-+BEGIN CERTIFICATE-+)(\n\r?|\r\n?)([A-Za-z0-9+/\n\r]+=*)(\n\r?|\r\n?)(-+END CERTIFICATE-+)/g; + const publicKeys = []; + // Match all possible certificates, in the order they are in the file. These will form the chain that is used for x5c + let match; + do { + match = certificatePattern.exec(certificateParts.certificateContents); + if (match) { + publicKeys.push(match[3]); } - if (this.globalEndpointManager.canUseMultipleWriteLocations(this.resourceType, this.operationType)) { - // If we can write to multiple locations, we should against every write endpoint until we succeed - const endpoints = isReadRequest(this.operationType) - ? await this.globalEndpointManager.getReadEndpoints() - : await this.globalEndpointManager.getWriteEndpoints(); - if (this.currentRetryAttemptCount > endpoints.length) { - return false; + } while (match); + if (publicKeys.length === 0) { + throw new Error("The file at the specified path does not contain a PEM-encoded certificate."); + } + certificateParts.thumbprint = crypto.createHash("sha1") + .update(Buffer.from(publicKeys[0], "base64")) + .digest("hex") + .toUpperCase(); + return certificateParts; +} +/** + * MSAL client certificate client. Calls to MSAL's confidential application's `acquireTokenByClientCredential` during `doGetToken`. 
+ * @internal + */ +class MsalClientCertificate extends MsalNode { + constructor(options) { + super(options); + this.requiresConfidential = true; + this.configuration = options.configuration; + this.sendCertificateChain = options.sendCertificateChain; + } + // Changing the MSAL configuration asynchronously + async init(options) { + try { + const parts = await parseCertificate(this.configuration, this.sendCertificateChain); + let privateKey; + if (this.configuration.certificatePassword !== undefined) { + const privateKeyObject = crypto.createPrivateKey({ + key: parts.certificateContents, + passphrase: this.configuration.certificatePassword, + format: "pem", + }); + privateKey = privateKeyObject + .export({ + format: "pem", + type: "pkcs8", + }) + .toString(); } else { - this.currentRetryAttemptCount++; - retryContext.retryCount++; - retryContext.retryRequestOnPreferredLocations = this.currentRetryAttemptCount > 1; - retryContext.clearSessionTokenNotAvailable = - this.currentRetryAttemptCount === endpoints.length; - diagnosticNode.addData({ successfulRetryPolicy: "session" }); - return true; + privateKey = parts.certificateContents; } + this.msalConfig.auth.clientCertificate = { + thumbprint: parts.thumbprint, + privateKey: privateKey, + x5c: parts.x5c, + }; } - else { - if (this.currentRetryAttemptCount > 1) { - return false; - } - else { - this.currentRetryAttemptCount++; - retryContext.retryCount++; - retryContext.retryRequestOnPreferredLocations = false; // Forces all operations to primary write endpoint - retryContext.clearSessionTokenNotAvailable = true; - diagnosticNode.addData({ successfulRetryPolicy: "session" }); - return true; - } + catch (error) { + this.logger.info(formatError("", error)); + throw error; + } + return super.init(options); + } + async doGetToken(scopes, options = {}) { + try { + const clientCredReq = { + scopes, + correlationId: options.correlationId, + azureRegion: this.azureRegion, + authority: options.authority, + claims: options.claims, + }; + const result = await this.getApp("confidential", options.enableCae).acquireTokenByClientCredential(clientCredReq); + // Even though we're providing the same default in memory persistence cache that we use for DeviceCodeCredential, + // The Client Credential flow does not return the account information from the authentication service, + // so each time getToken gets called, we will have to acquire a new token through the service. + return this.handleResult(scopes, this.clientId, result || undefined); + } + catch (err) { + throw this.handleError(scopes, err, options); } } } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const credentialName$2 = "ClientCertificateCredential"; +const logger$8 = credentialLogger(credentialName$2); /** - * This class TimeoutFailoverRetryPolicy handles retries for read operations - * (including data plane,metadata, and query plan) in case of request timeouts - * (TimeoutError) or service unavailability (503 status code) by performing failover - * and retrying on other regions. - * @hidden + * Enables authentication to Microsoft Entra ID using a PEM-encoded + * certificate that is assigned to an App Registration. 
More information + * on how to configure certificate authentication can be found here: + * + * https://learn.microsoft.com/en-us/azure/active-directory/develop/active-directory-certificate-credentials#register-your-certificate-with-azure-ad + * */ -class TimeoutFailoverRetryPolicy { - constructor(globalEndpointManager, headers, methodType, resourceType, operationType, enableEndPointDiscovery) { - this.globalEndpointManager = globalEndpointManager; - this.headers = headers; - this.methodType = methodType; - this.resourceType = resourceType; - this.operationType = operationType; - this.enableEndPointDiscovery = enableEndPointDiscovery; - this.maxRetryAttemptCount = 120; - this.maxServiceUnavailableRetryCount = 1; - this.retryAfterInMs = 0; - this.failoverRetryCount = 0; - } - /** - * Checks if a timeout request is valid for the timeout failover retry policy. - * A valid request should be a data plane, metadata, or query plan request. - * @returns - */ - isValidRequestForTimeoutError() { - const isQuery = Constants$1.HttpHeaders.IsQuery in this.headers; - const isQueryPlan = Constants$1.HttpHeaders.IsQueryPlan in this.headers; - if (this.methodType === exports.HTTPMethod.get || isQuery || isQueryPlan) { - return true; - } - return false; - } - async shouldRetry(err, diagnosticNode, retryContext, locationEndpoint) { - if (!err) { - return false; - } - if (!retryContext || !locationEndpoint) { - return false; - } - // Check if the error is a timeout error (TimeoutErrorCode) and if it is not a valid HTTP network timeout request - if (err.code === TimeoutErrorCode && !this.isValidRequestForTimeoutError()) { - return false; - } - if (!this.enableEndPointDiscovery) { - return false; - } - if (err.code === StatusCodes.ServiceUnavailable && - this.failoverRetryCount >= this.maxServiceUnavailableRetryCount) { - return false; +class ClientCertificateCredential { + constructor(tenantId, clientId, certificatePathOrConfiguration, options = {}) { + if (!tenantId || !clientId) { + throw new Error(`${credentialName$2}: tenantId and clientId are required parameters.`); } - if (this.failoverRetryCount >= this.maxRetryAttemptCount) { - return false; + this.tenantId = tenantId; + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + const configuration = Object.assign({}, (typeof certificatePathOrConfiguration === "string" + ? { + certificatePath: certificatePathOrConfiguration, + } + : certificatePathOrConfiguration)); + const certificate = configuration + .certificate; + const certificatePath = configuration.certificatePath; + if (!configuration || !(certificate || certificatePath)) { + throw new Error(`${credentialName$2}: Provide either a PEM certificate in string form, or the path to that certificate in the filesystem. To troubleshoot, visit https://aka.ms/azsdk/js/identity/serviceprincipalauthentication/troubleshoot.`); } - const canUseMultipleWriteLocations = this.globalEndpointManager.canUseMultipleWriteLocations(this.resourceType, this.operationType); - const readRequest = isReadRequest(this.operationType); - if (!canUseMultipleWriteLocations && !readRequest) { - // Write requests on single master cannot be retried, no other regions available - return false; + if (certificate && certificatePath) { + throw new Error(`${credentialName$2}: To avoid unexpected behaviors, providing both the contents of a PEM certificate and the path to a PEM certificate is forbidden. 
To troubleshoot, visit https://aka.ms/azsdk/js/identity/serviceprincipalauthentication/troubleshoot.`); } - this.failoverRetryCount++; - // Setting the retryLocationIndex to the next available location for retry. - // The retryLocationIndex is determined based on the failoverRetryCount, starting from zero. - retryContext.retryLocationServerIndex = await this.findEndpointIndex(this.failoverRetryCount); - diagnosticNode.addData({ successfulRetryPolicy: "timeout-failover" }); - return true; + this.msalFlow = new MsalClientCertificate(Object.assign(Object.assign({}, options), { configuration, + logger: logger$8, + clientId, + tenantId, sendCertificateChain: options.sendCertificateChain, tokenCredentialOptions: options })); } /** - * Determines index of endpoint to be used for retry based upon failoverRetryCount and avalable locations - * @param failoverRetryCount - count of failovers - * @returns + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. */ - async findEndpointIndex(failoverRetryCount) { - // count of preferred locations specified by user - const preferredLocationsCount = this.globalEndpointManager.preferredLocationsCount; - const readRequest = isReadRequest(this.operationType); - let endpointIndex = 0; - // If preferredLocationsCount is not zero, it indicates that the user has specified preferred locations. - if (preferredLocationsCount !== 0) { - // The endpointIndex is set based on the preferred location and the failover retry count. - endpointIndex = failoverRetryCount % preferredLocationsCount; - } - else { - // In the absence of preferred locations, the endpoint selection is based on the failover count and the number of available locations. - if (readRequest) { - const getReadEndpoints = await this.globalEndpointManager.getReadEndpoints(); - if (getReadEndpoints && getReadEndpoints.length > 0) { - endpointIndex = failoverRetryCount % getReadEndpoints.length; - } - } - else { - const getWriteEndpoints = await this.globalEndpointManager.getWriteEndpoints(); - if (getWriteEndpoints && getWriteEndpoints.length > 0) { - endpointIndex = failoverRetryCount % getWriteEndpoints.length; - } - } - } - return endpointIndex; + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${credentialName$2}.getToken`, options, async (newOptions) => { + newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$8); + const arrayScopes = Array.isArray(scopes) ? scopes : [scopes]; + return this.msalFlow.getToken(arrayScopes, newOptions); + }); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * @hidden + * MSAL client secret client. Calls to MSAL's confidential application's `acquireTokenByClientCredential` during `doGetToken`. 
+ * @internal */ -async function execute({ diagnosticNode, retryContext = { retryCount: 0 }, retryPolicies, requestContext, executeRequest, }) { - // TODO: any response - return addDignosticChild(async (localDiagnosticNode) => { - localDiagnosticNode.addData({ requestAttempNumber: retryContext.retryCount }); - if (!retryPolicies) { - retryPolicies = { - endpointDiscoveryRetryPolicy: new EndpointDiscoveryRetryPolicy(requestContext.globalEndpointManager, requestContext.operationType), - resourceThrottleRetryPolicy: new ResourceThrottleRetryPolicy(requestContext.connectionPolicy.retryOptions.maxRetryAttemptCount, requestContext.connectionPolicy.retryOptions.fixedRetryIntervalInMilliseconds, requestContext.connectionPolicy.retryOptions.maxWaitTimeInSeconds), - sessionReadRetryPolicy: new SessionRetryPolicy(requestContext.globalEndpointManager, requestContext.resourceType, requestContext.operationType, requestContext.connectionPolicy), - defaultRetryPolicy: new DefaultRetryPolicy(requestContext.operationType), - timeoutFailoverRetryPolicy: new TimeoutFailoverRetryPolicy(requestContext.globalEndpointManager, requestContext.headers, requestContext.method, requestContext.resourceType, requestContext.operationType, requestContext.connectionPolicy.enableEndpointDiscovery), - }; - } - if (retryContext && retryContext.clearSessionTokenNotAvailable) { - requestContext.client.clearSessionToken(requestContext.path); - delete requestContext.headers["x-ms-session-token"]; - } - if (retryContext && retryContext.retryLocationServerIndex) { - requestContext.endpoint = await requestContext.globalEndpointManager.resolveServiceEndpoint(localDiagnosticNode, requestContext.resourceType, requestContext.operationType, retryContext.retryLocationServerIndex); - } - else { - requestContext.endpoint = await requestContext.globalEndpointManager.resolveServiceEndpoint(localDiagnosticNode, requestContext.resourceType, requestContext.operationType); - } - const startTimeUTCInMs = getCurrentTimestampInMs(); +class MsalClientSecret extends MsalNode { + constructor(options) { + super(options); + this.requiresConfidential = true; + this.msalConfig.auth.clientSecret = options.clientSecret; + } + async doGetToken(scopes, options = {}) { try { - const response = await executeRequest(localDiagnosticNode, requestContext); - response.headers[Constants$1.ThrottleRetryCount] = - retryPolicies.resourceThrottleRetryPolicy.currentRetryAttemptCount; - response.headers[Constants$1.ThrottleRetryWaitTimeInMs] = - retryPolicies.resourceThrottleRetryPolicy.cummulativeWaitTimeinMs; - return response; + const result = await this.getApp("confidential", options.enableCae).acquireTokenByClientCredential({ + scopes, + correlationId: options.correlationId, + azureRegion: this.azureRegion, + authority: options.authority, + claims: options.claims, + }); + // The Client Credential flow does not return an account, + // so each time getToken gets called, we will have to acquire a new token through the service. 
+ return this.handleResult(scopes, this.clientId, result || undefined); } catch (err) { - // TODO: any error - let retryPolicy = null; - const headers = err.headers || {}; - if (err.code === StatusCodes.ENOTFOUND || - err.code === "REQUEST_SEND_ERROR" || - (err.code === StatusCodes.Forbidden && - (err.substatus === SubStatusCodes.DatabaseAccountNotFound || - err.substatus === SubStatusCodes.WriteForbidden))) { - retryPolicy = retryPolicies.endpointDiscoveryRetryPolicy; - } - else if (err.code === StatusCodes.TooManyRequests) { - retryPolicy = retryPolicies.resourceThrottleRetryPolicy; - } - else if (err.code === StatusCodes.NotFound && - err.substatus === SubStatusCodes.ReadSessionNotAvailable) { - retryPolicy = retryPolicies.sessionReadRetryPolicy; - } - else if (err.code === StatusCodes.ServiceUnavailable || err.code === TimeoutErrorCode) { - retryPolicy = retryPolicies.timeoutFailoverRetryPolicy; - } - else { - retryPolicy = retryPolicies.defaultRetryPolicy; - } - const results = await retryPolicy.shouldRetry(err, localDiagnosticNode, retryContext, requestContext.endpoint); - if (!results) { - headers[Constants$1.ThrottleRetryCount] = - retryPolicies.resourceThrottleRetryPolicy.currentRetryAttemptCount; - headers[Constants$1.ThrottleRetryWaitTimeInMs] = - retryPolicies.resourceThrottleRetryPolicy.cummulativeWaitTimeinMs; - err.headers = Object.assign(Object.assign({}, err.headers), headers); - throw err; - } - else { - requestContext.retryCount++; - const newUrl = results[1]; // TODO: any hack - if (newUrl !== undefined) { - requestContext.endpoint = newUrl; - } - localDiagnosticNode.recordFailedNetworkCall(startTimeUTCInMs, requestContext, retryContext.retryCount, err.code, err.subsstatusCode, headers); - await sleep(retryPolicy.retryAfterInMs); - return execute({ - diagnosticNode, - executeRequest, - requestContext, - retryContext, - retryPolicies, - }); - } + throw this.handleError(scopes, err, options); } - }, diagnosticNode, exports.DiagnosticNodeType.HTTP_REQUEST); + } } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const logger$7 = credentialLogger("ClientSecretCredential"); /** - * @hidden - */ -let defaultHttpsAgent; -const https = __nccwpck_require__(95687); // eslint-disable-line @typescript-eslint/no-require-imports -const tls = __nccwpck_require__(24404); // eslint-disable-line @typescript-eslint/no-require-imports -// minVersion only available in Node 10+ -if (tls.DEFAULT_MIN_VERSION) { - defaultHttpsAgent = new https.Agent({ - keepAlive: true, - minVersion: "TLSv1.2", - }); -} -else { - // Remove when Node 8 support has been dropped - defaultHttpsAgent = new https.Agent({ - keepAlive: true, - secureProtocol: "TLSv1_2_method", - }); -} -const http = __nccwpck_require__(13685); // eslint-disable-line @typescript-eslint/no-require-imports -/** - * @internal + * Enables authentication to Microsoft Entra ID using a client secret + * that was generated for an App Registration. More information on how + * to configure a client secret can be found here: + * + * https://learn.microsoft.com/azure/active-directory/develop/quickstart-configure-app-access-web-apis#add-credentials-to-your-web-application + * */ -const defaultHttpAgent = new http.Agent({ - keepAlive: true, -}); - -// Copyright (c) Microsoft Corporation. 
-let cachedHttpClient; -function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = coreRestPipeline.createDefaultHttpClient(); +class ClientSecretCredential { + /** + * Creates an instance of the ClientSecretCredential with the details + * needed to authenticate against Microsoft Entra ID with a client + * secret. + * + * @param tenantId - The Microsoft Entra tenant (directory) ID. + * @param clientId - The client (application) ID of an App Registration in the tenant. + * @param clientSecret - A client secret that was generated for the App Registration. + * @param options - Options for configuring the client which makes the authentication request. + */ + constructor(tenantId, clientId, clientSecret, options = {}) { + if (!tenantId || !clientId || !clientSecret) { + throw new Error("ClientSecretCredential: tenantId, clientId, and clientSecret are required parameters. To troubleshoot, visit https://aka.ms/azsdk/js/identity/serviceprincipalauthentication/troubleshoot."); + } + this.tenantId = tenantId; + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + this.msalFlow = new MsalClientSecret(Object.assign(Object.assign({}, options), { logger: logger$7, + clientId, + tenantId, + clientSecret, tokenCredentialOptions: options })); + } + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { + newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$7); + const arrayScopes = ensureScopes(scopes); + return this.msalFlow.getToken(arrayScopes, newOptions); + }); } - return cachedHttpClient; } // Copyright (c) Microsoft Corporation. -const logger$1 = logger$5.createClientLogger("RequestHandler"); -async function executeRequest(diagnosticNode, requestContext) { - return executePlugins(diagnosticNode, requestContext, httpRequest, exports.PluginOn.request); -} +// Licensed under the MIT license. /** - * @hidden + * MSAL username and password client. Calls to the MSAL's public application's `acquireTokenByUsernamePassword` during `doGetToken`. 
+ * @internal */ -async function httpRequest(requestContext, diagnosticNode) { - const controller = new nodeAbortController.AbortController(); - const signal = controller.signal; - // Wrap users passed abort events and call our own internal abort() - const userSignal = requestContext.options && requestContext.options.abortSignal; - if (userSignal) { - if (userSignal.aborted) { - controller.abort(); - } - else { - userSignal.addEventListener("abort", () => { - controller.abort(); - }); - } - } - const timeout = setTimeout(() => { - controller.abort(); - }, requestContext.connectionPolicy.requestTimeout); - let response; - if (requestContext.body) { - requestContext.body = bodyFromData(requestContext.body); - } - const httpsClient = getCachedDefaultHttpClient(); - const url = prepareURL(requestContext.endpoint, requestContext.path); - const reqHeaders = coreRestPipeline.createHttpHeaders(requestContext.headers); - const pipelineRequest = coreRestPipeline.createPipelineRequest({ - url, - headers: reqHeaders, - method: requestContext.method, - abortSignal: signal, - body: requestContext.body, - }); - if (requestContext.requestAgent) { - pipelineRequest.agent = requestContext.requestAgent; - } - else { - const parsedUrl = new URL(url); - pipelineRequest.agent = parsedUrl.protocol === "http" ? defaultHttpAgent : defaultHttpsAgent; - } - const startTimeUTCInMs = getCurrentTimestampInMs(); - try { - if (requestContext.pipeline) { - response = await requestContext.pipeline.sendRequest(httpsClient, pipelineRequest); - } - else { - response = await httpsClient.sendRequest(pipelineRequest); - } - } - catch (error) { - if (error.name === "AbortError") { - // If the user passed signal caused the abort, cancel the timeout and rethrow the error - if (userSignal && userSignal.aborted === true) { - clearTimeout(timeout); - throw error; - } - // If the user didn't cancel, it must be an abort we called due to timeout - throw new TimeoutError(`Timeout Error! Request took more than ${requestContext.connectionPolicy.requestTimeout} ms`); - } - throw error; +class MsalUsernamePassword extends MsalNode { + constructor(options) { + super(options); + this.username = options.username; + this.password = options.password; } - clearTimeout(timeout); - const result = response.status === 204 || response.status === 304 || response.bodyAsText === "" - ? null - : JSON.parse(response.bodyAsText); - const responseHeaders = response.headers.toJSON(); - const substatus = responseHeaders[Constants$1.HttpHeaders.SubStatus] - ? parseInt(responseHeaders[Constants$1.HttpHeaders.SubStatus], 10) - : undefined; - diagnosticNode.recordSuccessfulNetworkCall(startTimeUTCInMs, requestContext, response, substatus, url); - if (response.status >= 400) { - const errorResponse = new ErrorResponse(result.message); - logger$1.warning(response.status + - " " + - requestContext.endpoint + - " " + - requestContext.path + - " " + - result.message); - errorResponse.code = response.status; - errorResponse.body = result; - errorResponse.headers = responseHeaders; - if (Constants$1.HttpHeaders.ActivityId in responseHeaders) { - errorResponse.activityId = responseHeaders[Constants$1.HttpHeaders.ActivityId]; - } - if (Constants$1.HttpHeaders.SubStatus in responseHeaders) { - errorResponse.substatus = substatus; + async doGetToken(scopes, options) { + try { + const requestOptions = { + scopes, + username: this.username, + password: this.password, + correlationId: options === null || options === void 0 ? 
void 0 : options.correlationId, + authority: options === null || options === void 0 ? void 0 : options.authority, + claims: options === null || options === void 0 ? void 0 : options.claims, + }; + const result = await this.getApp("public", options === null || options === void 0 ? void 0 : options.enableCae).acquireTokenByUsernamePassword(requestOptions); + return this.handleResult(scopes, this.clientId, result || undefined); } - if (Constants$1.HttpHeaders.RetryAfterInMs in responseHeaders) { - errorResponse.retryAfterInMs = parseInt(responseHeaders[Constants$1.HttpHeaders.RetryAfterInMs], 10); - Object.defineProperty(errorResponse, "retryAfterInMilliseconds", { - get: () => { - return errorResponse.retryAfterInMs; - }, - }); + catch (error) { + throw this.handleError(scopes, error, options); } - throw errorResponse; } - return { - headers: responseHeaders, - result, - code: response.status, - substatus, - }; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const logger$6 = credentialLogger("UsernamePasswordCredential"); /** - * @hidden + * Enables authentication to Microsoft Entra ID with a user's + * username and password. This credential requires a high degree of + * trust so you should only use it when other, more secure credential + * types can't be used. */ -async function request(requestContext, diagnosticNode) { - if (requestContext.body) { - requestContext.body = bodyFromData(requestContext.body); - if (!requestContext.body) { - throw new Error("parameter data must be a javascript object, string, or Buffer"); +class UsernamePasswordCredential { + /** + * Creates an instance of the UsernamePasswordCredential with the details + * needed to authenticate against Microsoft Entra ID with a username + * and password. + * + * @param tenantId - The Microsoft Entra tenant (directory). + * @param clientId - The client (application) ID of an App Registration in the tenant. + * @param username - The user account's e-mail address (user name). + * @param password - The user account's account password + * @param options - Options for configuring the client which makes the authentication request. + */ + constructor(tenantId, clientId, username, password, options = {}) { + if (!tenantId || !clientId || !username || !password) { + throw new Error("UsernamePasswordCredential: tenantId, clientId, username and password are required parameters. To troubleshoot, visit https://aka.ms/azsdk/js/identity/usernamepasswordcredential/troubleshoot."); } + this.tenantId = tenantId; + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + this.msalFlow = new MsalUsernamePassword(Object.assign(Object.assign({}, options), { logger: logger$6, + clientId, + tenantId, + username, + password, tokenCredentialOptions: options || {} })); } - return addDignosticChild(async (childNode) => { - return execute({ - diagnosticNode: childNode, - requestContext, - executeRequest, + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * If the user provided the option `disableAutomaticAuthentication`, + * once the token can't be retrieved silently, + * this method won't attempt to request user interaction to retrieve the token. + * + * @param scopes - The list of scopes for which the token will have access. 
+ * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { + newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$6); + const arrayScopes = ensureScopes(scopes); + return this.msalFlow.getToken(arrayScopes, newOptions); }); - }, diagnosticNode, exports.DiagnosticNodeType.REQUEST_ATTEMPTS); -} -const RequestHandler = { - request, -}; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function atob(str) { - return Buffer.from(str, "base64").toString("binary"); + } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Models vector clock bases session token. Session token has the following format: - * `{Version}#{GlobalLSN}#{RegionId1}={LocalLsn1}#{RegionId2}={LocalLsn2}....#{RegionIdN}={LocalLsnN}` - * 'Version' captures the configuration number of the partition which returned this session token. - * 'Version' is incremented everytime topology of the partition is updated (say due to Add/Remove/Failover). - * - * The choice of separators '#' and '=' is important. Separators ';' and ',' are used to delimit - * per-partitionKeyRange session token - * @hidden + * Contains the list of all supported environment variable names so that an + * appropriate error message can be generated when no credentials can be + * configured. * + * @internal */ -class VectorSessionToken { - constructor(version, globalLsn, localLsnByregion, sessionToken) { - this.version = version; - this.globalLsn = globalLsn; - this.localLsnByregion = localLsnByregion; - this.sessionToken = sessionToken; - if (!this.sessionToken) { - const regionAndLocalLsn = []; - for (const [key, value] of this.localLsnByregion.entries()) { - regionAndLocalLsn.push(`${key}${VectorSessionToken.REGION_PROGRESS_SEPARATOR}${value}`); - } - const regionProgress = regionAndLocalLsn.join(VectorSessionToken.SEGMENT_SEPARATOR); - if (regionProgress === "") { - this.sessionToken = `${this.version}${VectorSessionToken.SEGMENT_SEPARATOR}${this.globalLsn}`; - } - else { - this.sessionToken = `${this.version}${VectorSessionToken.SEGMENT_SEPARATOR}${this.globalLsn}${VectorSessionToken.SEGMENT_SEPARATOR}${regionProgress}`; - } +const AllSupportedEnvironmentVariables = [ + "AZURE_TENANT_ID", + "AZURE_CLIENT_ID", + "AZURE_CLIENT_SECRET", + "AZURE_CLIENT_CERTIFICATE_PATH", + "AZURE_CLIENT_CERTIFICATE_PASSWORD", + "AZURE_USERNAME", + "AZURE_PASSWORD", + "AZURE_ADDITIONALLY_ALLOWED_TENANTS", +]; +function getAdditionallyAllowedTenants() { + var _a; + const additionallyAllowedValues = (_a = process.env.AZURE_ADDITIONALLY_ALLOWED_TENANTS) !== null && _a !== void 0 ? _a : ""; + return additionallyAllowedValues.split(";"); +} +const credentialName$1 = "EnvironmentCredential"; +const logger$5 = credentialLogger(credentialName$1); +/** + * Enables authentication to Microsoft Entra ID using a client secret or certificate, or as a user + * with a username and password. + */ +class EnvironmentCredential { + /** + * Creates an instance of the EnvironmentCredential class and decides what credential to use depending on the available environment variables. + * + * Required environment variables: + * - `AZURE_TENANT_ID`: The Microsoft Entra tenant (directory) ID. + * - `AZURE_CLIENT_ID`: The client (application) ID of an App Registration in the tenant. 
+ * + * If setting the AZURE_TENANT_ID, then you can also set the additionally allowed tenants + * - `AZURE_ADDITIONALLY_ALLOWED_TENANTS`: For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens with a single semicolon delimited string. Use * to allow all tenants. + * + * Environment variables used for client credential authentication: + * - `AZURE_CLIENT_SECRET`: A client secret that was generated for the App Registration. + * - `AZURE_CLIENT_CERTIFICATE_PATH`: The path to a PEM certificate to use during the authentication, instead of the client secret. + * - `AZURE_CLIENT_CERTIFICATE_PASSWORD`: (optional) password for the certificate file. + * + * Alternatively, users can provide environment variables for username and password authentication: + * - `AZURE_USERNAME`: Username to authenticate with. + * - `AZURE_PASSWORD`: Password to authenticate with. + * + * If the environment variables required to perform the authentication are missing, a {@link CredentialUnavailableError} will be thrown. + * If the authentication fails, or if there's an unknown error, an {@link AuthenticationError} will be thrown. + * + * @param options - Options for configuring the client which makes the authentication request. + */ + constructor(options) { + // Keep track of any missing environment variables for error details + this._credential = undefined; + const assigned = processEnvVars(AllSupportedEnvironmentVariables).assigned.join(", "); + logger$5.info(`Found the following environment variables: ${assigned}`); + const tenantId = process.env.AZURE_TENANT_ID, clientId = process.env.AZURE_CLIENT_ID, clientSecret = process.env.AZURE_CLIENT_SECRET; + const additionallyAllowedTenantIds = getAdditionallyAllowedTenants(); + const newOptions = Object.assign(Object.assign({}, options), { additionallyAllowedTenantIds }); + if (tenantId) { + checkTenantId(logger$5, tenantId); } - } - static create(sessionToken) { - const [versionStr, globalLsnStr, ...regionSegments] = sessionToken.split(VectorSessionToken.SEGMENT_SEPARATOR); - const version = parseInt(versionStr, 10); - const globalLsn = parseFloat(globalLsnStr); - if (typeof version !== "number" || typeof globalLsn !== "number") { - return null; + if (tenantId && clientId && clientSecret) { + logger$5.info(`Invoking ClientSecretCredential with tenant ID: ${tenantId}, clientId: ${clientId} and clientSecret: [REDACTED]`); + this._credential = new ClientSecretCredential(tenantId, clientId, clientSecret, newOptions); + return; } - const lsnByRegion = new Map(); - for (const regionSegment of regionSegments) { - const [regionIdStr, localLsnStr] = regionSegment.split(VectorSessionToken.REGION_PROGRESS_SEPARATOR); - if (!regionIdStr || !localLsnStr) { - return null; + const certificatePath = process.env.AZURE_CLIENT_CERTIFICATE_PATH; + const certificatePassword = process.env.AZURE_CLIENT_CERTIFICATE_PASSWORD; + if (tenantId && clientId && certificatePath) { + logger$5.info(`Invoking ClientCertificateCredential with tenant ID: ${tenantId}, clientId: ${clientId} and certificatePath: ${certificatePath}`); + this._credential = new ClientCertificateCredential(tenantId, clientId, { certificatePath, certificatePassword }, newOptions); + return; + } + const username = process.env.AZURE_USERNAME; + const password = process.env.AZURE_PASSWORD; + if (tenantId && clientId && username && password) { + logger$5.info(`Invoking UsernamePasswordCredential with tenant ID: ${tenantId}, clientId: ${clientId} and username: ${username}`); + 
this._credential = new UsernamePasswordCredential(tenantId, clientId, username, password, newOptions); + } + } + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - Optional parameters. See {@link GetTokenOptions}. + */ + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${credentialName$1}.getToken`, options, async (newOptions) => { + if (this._credential) { + try { + const result = await this._credential.getToken(scopes, newOptions); + logger$5.getToken.info(formatSuccess(scopes)); + return result; + } + catch (err) { + const authenticationError = new AuthenticationError(400, { + error: `${credentialName$1} authentication failed. To troubleshoot, visit https://aka.ms/azsdk/js/identity/environmentcredential/troubleshoot.`, + error_description: err.message.toString().split("More details:").join(""), + }); + logger$5.getToken.info(formatError(scopes, authenticationError)); + throw authenticationError; + } } - const regionId = parseInt(regionIdStr, 10); - let localLsn; + throw new CredentialUnavailableError(`${credentialName$1} is unavailable. No underlying credential could be used. To troubleshoot, visit https://aka.ms/azsdk/js/identity/environmentcredential/troubleshoot.`); + }); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Mockable reference to the Developer CLI credential cliCredentialFunctions + * @internal + */ +const developerCliCredentialInternals = { + /** + * @internal + */ + getSafeWorkingDir() { + if (process.platform === "win32") { + if (!process.env.SystemRoot) { + throw new Error("Azure Developer CLI credential expects a 'SystemRoot' environment variable"); + } + return process.env.SystemRoot; + } + else { + return "/bin"; + } + }, + /** + * Gets the access token from Azure Developer CLI + * @param scopes - The scopes to use when getting the token + * @internal + */ + async getAzdAccessToken(scopes, tenantId, timeout) { + let tenantSection = []; + if (tenantId) { + tenantSection = ["--tenant-id", tenantId]; + } + return new Promise((resolve, reject) => { try { - localLsn = localLsnStr; + child_process.execFile("azd", [ + "auth", + "token", + "--output", + "json", + ...scopes.reduce((previous, current) => previous.concat("--scope", current), []), + ...tenantSection, + ], { + cwd: developerCliCredentialInternals.getSafeWorkingDir(), + timeout, + }, (error, stdout, stderr) => { + resolve({ stdout, stderr, error }); + }); } catch (err) { - // TODO: log error - return null; - } - if (typeof regionId !== "number") { - return null; + reject(err); } - lsnByRegion.set(regionId, localLsn); + }); + }, +}; +const logger$4 = credentialLogger("AzureDeveloperCliCredential"); +/** + * Azure Developer CLI is a command-line interface tool that allows developers to create, manage, and deploy + * resources in Azure. It's built on top of the Azure CLI and provides additional functionality specific + * to Azure developers. It allows users to authenticate as a user and/or a service principal against + * Microsoft Entra ID. The + * AzureDeveloperCliCredential authenticates in a development environment and acquires a token on behalf of + * the logged-in user or service principal in the Azure Developer CLI. It acts as the Azure Developer CLI logged in user or + * service principal and executes an Azure CLI command underneath to authenticate the application against + * Microsoft Entra ID. + * + *
+ * Configure AzureDeveloperCliCredential
+ * + * To use this credential, the developer needs to authenticate locally in Azure Developer CLI using one of the + * commands below: + * + *
    + *
+ *   1. Run "azd auth login" in Azure Developer CLI to authenticate interactively as a user.
+ *   2. Run "azd auth login --client-id clientID --client-secret clientSecret
+ *      --tenant-id tenantID" to authenticate as a service principal.
+ * + * You may need to repeat this process after a certain time period, depending on the refresh token validity in your + * organization. Generally, the refresh token validity period is a few weeks to a few months. + * AzureDeveloperCliCredential will prompt you to sign in again. + */ +class AzureDeveloperCliCredential { + /** + * Creates an instance of the {@link AzureDeveloperCliCredential}. + * + * To use this credential, ensure that you have already logged + * in via the 'azd' tool using the command "azd auth login" from the commandline. + * + * @param options - Options, to optionally allow multi-tenant requests. + */ + constructor(options) { + if (options === null || options === void 0 ? void 0 : options.tenantId) { + checkTenantId(logger$4, options === null || options === void 0 ? void 0 : options.tenantId); + this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; } - return new VectorSessionToken(version, globalLsn, lsnByRegion, sessionToken); - } - equals(other) { - return !other - ? false - : this.version === other.version && - this.globalLsn === other.globalLsn && - this.areRegionProgressEqual(other.localLsnByregion); + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + this.timeout = options === null || options === void 0 ? void 0 : options.processTimeoutInMs; } - merge(other) { - if (other == null) { - throw new Error("other (Vector Session Token) must not be null"); + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + async getToken(scopes, options = {}) { + const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds); + if (tenantId) { + checkTenantId(logger$4, tenantId); } - if (this.version === other.version && - this.localLsnByregion.size !== other.localLsnByregion.size) { - throw new Error(`Compared session tokens ${this.sessionToken} and ${other.sessionToken} have unexpected regions`); + let scopeList; + if (typeof scopes === "string") { + scopeList = [scopes]; } - const [higherVersionSessionToken, lowerVersionSessionToken] = this.version < other.version ? [other, this] : [this, other]; - const highestLocalLsnByRegion = new Map(); - for (const [regionId, highLocalLsn] of higherVersionSessionToken.localLsnByregion.entries()) { - const lowLocalLsn = lowerVersionSessionToken.localLsnByregion.get(regionId); - if (lowLocalLsn) { - highestLocalLsnByRegion.set(regionId, max(highLocalLsn, lowLocalLsn)); - } - else if (this.version === other.version) { - throw new Error(`Compared session tokens have unexpected regions. 
Session 1: ${this.sessionToken} - Session 2: ${this.sessionToken}`); + else { + scopeList = scopes; + } + logger$4.getToken.info(`Using the scopes ${scopes}`); + return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async () => { + var _a, _b, _c, _d; + try { + scopeList.forEach((scope) => { + ensureValidScopeForDevTimeCreds(scope, logger$4); + }); + const obj = await developerCliCredentialInternals.getAzdAccessToken(scopeList, tenantId, this.timeout); + const isNotLoggedInError = ((_a = obj.stderr) === null || _a === void 0 ? void 0 : _a.match("not logged in, run `azd login` to login")) || + ((_b = obj.stderr) === null || _b === void 0 ? void 0 : _b.match("not logged in, run `azd auth login` to login")); + const isNotInstallError = ((_c = obj.stderr) === null || _c === void 0 ? void 0 : _c.match("azd:(.*)not found")) || + ((_d = obj.stderr) === null || _d === void 0 ? void 0 : _d.startsWith("'azd' is not recognized")); + if (isNotInstallError || (obj.error && obj.error.code === "ENOENT")) { + const error = new CredentialUnavailableError("Azure Developer CLI couldn't be found. To mitigate this issue, see the troubleshooting guidelines at https://aka.ms/azsdk/js/identity/azdevclicredential/troubleshoot."); + logger$4.getToken.info(formatError(scopes, error)); + throw error; + } + if (isNotLoggedInError) { + const error = new CredentialUnavailableError("Please run 'azd auth login' from a command prompt to authenticate before using this credential. For more information, see the troubleshooting guidelines at https://aka.ms/azsdk/js/identity/azdevclicredential/troubleshoot."); + logger$4.getToken.info(formatError(scopes, error)); + throw error; + } + try { + const resp = JSON.parse(obj.stdout); + logger$4.getToken.info(formatSuccess(scopes)); + return { + token: resp.token, + expiresOnTimestamp: new Date(resp.expiresOn).getTime(), + }; + } + catch (e) { + if (obj.stderr) { + throw new CredentialUnavailableError(obj.stderr); + } + throw e; + } } - else { - highestLocalLsnByRegion.set(regionId, highLocalLsn); + catch (err) { + const error = err.name === "CredentialUnavailableError" + ? err + : new CredentialUnavailableError(err.message || "Unknown error while trying to retrieve the access token"); + logger$4.getToken.info(formatError(scopes, error)); + throw error; } - } - return new VectorSessionToken(Math.max(this.version, other.version), Math.max(this.globalLsn, other.globalLsn), highestLocalLsnByRegion); - } - toString() { - return this.sessionToken; + }); } - areRegionProgressEqual(other) { - if (this.localLsnByregion.size !== other.size) { - return false; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A shim around ManagedIdentityCredential that adapts it to accept + * `DefaultAzureCredentialOptions`. + * + * @internal + */ +class DefaultManagedIdentityCredential extends ManagedIdentityCredential { + // Constructor overload with just the other default options + // Last constructor overload with Union of all options not required since the above two constructor overloads have optional properties + constructor(options) { + var _a, _b, _c; + const managedIdentityClientId = (_a = options === null || options === void 0 ? void 0 : options.managedIdentityClientId) !== null && _a !== void 0 ? _a : process.env.AZURE_CLIENT_ID; + const workloadIdentityClientId = (_b = options === null || options === void 0 ? void 0 : options.workloadIdentityClientId) !== null && _b !== void 0 ? 
_b : managedIdentityClientId; + const managedResourceId = options === null || options === void 0 ? void 0 : options.managedIdentityResourceId; + const workloadFile = process.env.AZURE_FEDERATED_TOKEN_FILE; + const tenantId = (_c = options === null || options === void 0 ? void 0 : options.tenantId) !== null && _c !== void 0 ? _c : process.env.AZURE_TENANT_ID; + // ManagedIdentityCredential throws if both the resourceId and the clientId are provided. + if (managedResourceId) { + const managedIdentityResourceIdOptions = Object.assign(Object.assign({}, options), { resourceId: managedResourceId }); + super(managedIdentityResourceIdOptions); } - for (const [regionId, localLsn] of this.localLsnByregion.entries()) { - const otherLocalLsn = other.get(regionId); - if (localLsn !== otherLocalLsn) { - return false; - } + else if (workloadFile && workloadIdentityClientId) { + const workloadIdentityCredentialOptions = Object.assign(Object.assign({}, options), { tenantId: tenantId }); + super(workloadIdentityClientId, workloadIdentityCredentialOptions); + } + else if (managedIdentityClientId) { + const managedIdentityClientOptions = Object.assign(Object.assign({}, options), { clientId: managedIdentityClientId }); + super(managedIdentityClientOptions); + } + else { + super(options); } - return true; } } -VectorSessionToken.SEGMENT_SEPARATOR = "#"; -VectorSessionToken.REGION_PROGRESS_SEPARATOR = "="; /** - * @hidden + * A shim around WorkloadIdentityCredential that adapts it to accept + * `DefaultAzureCredentialOptions`. + * + * @internal */ -function max(int1, int2) { - // NOTE: This only works for positive numbers - if (int1.length === int2.length) { - return int1 > int2 ? int1 : int2; +class DefaultWorkloadIdentityCredential extends WorkloadIdentityCredential { + // Constructor overload with just the other default options + // Last constructor overload with Union of all options not required since the above two constructor overloads have optional properties + constructor(options) { + var _a, _b, _c; + const managedIdentityClientId = (_a = options === null || options === void 0 ? void 0 : options.managedIdentityClientId) !== null && _a !== void 0 ? _a : process.env.AZURE_CLIENT_ID; + const workloadIdentityClientId = (_b = options === null || options === void 0 ? void 0 : options.workloadIdentityClientId) !== null && _b !== void 0 ? _b : managedIdentityClientId; + const workloadFile = process.env.AZURE_FEDERATED_TOKEN_FILE; + const tenantId = (_c = options === null || options === void 0 ? void 0 : options.tenantId) !== null && _c !== void 0 ? _c : process.env.AZURE_TENANT_ID; + if (workloadFile && workloadIdentityClientId) { + const workloadIdentityCredentialOptions = Object.assign(Object.assign({}, options), { tenantId, clientId: workloadIdentityClientId, tokenFilePath: workloadFile }); + super(workloadIdentityCredentialOptions); + } + else if (tenantId) { + const workloadIdentityClientTenantOptions = Object.assign(Object.assign({}, options), { tenantId }); + super(workloadIdentityClientTenantOptions); + } + else { + super(options); + } } - else if (int1.length > int2.length) { - return int1; +} +class DefaultAzureDeveloperCliCredential extends AzureDeveloperCliCredential { + constructor(options) { + super(Object.assign({ processTimeoutInMs: options === null || options === void 0 ? 
void 0 : options.processTimeoutInMs }, options)); } - else { - return int2; +} +class DefaultAzureCliCredential extends AzureCliCredential { + constructor(options) { + super(Object.assign({ processTimeoutInMs: options === null || options === void 0 ? void 0 : options.processTimeoutInMs }, options)); + } +} +class DefaultAzurePowershellCredential extends AzurePowerShellCredential { + constructor(options) { + super(Object.assign({ processTimeoutInMs: options === null || options === void 0 ? void 0 : options.processTimeoutInMs }, options)); + } +} +const defaultCredentials = [ + EnvironmentCredential, + DefaultWorkloadIdentityCredential, + DefaultManagedIdentityCredential, + DefaultAzureCliCredential, + DefaultAzurePowershellCredential, + DefaultAzureDeveloperCliCredential, +]; +/** + * Provides a default {@link ChainedTokenCredential} configuration that should + * work for most applications that use the Azure SDK. + */ +class DefaultAzureCredential extends ChainedTokenCredential { + constructor(options) { + super(...defaultCredentials.map((ctor) => new ctor(options))); } } // Copyright (c) Microsoft Corporation. -/** @hidden */ -class SessionContainer { - constructor(collectionNameToCollectionResourceId = new Map(), collectionResourceIdToSessionTokens = new Map()) { - this.collectionNameToCollectionResourceId = collectionNameToCollectionResourceId; - this.collectionResourceIdToSessionTokens = collectionResourceIdToSessionTokens; - } - get(request) { - if (!request) { - throw new Error("request cannot be null"); - } - const collectionName = getContainerLink(trimSlashes(request.resourceAddress)); - const rangeIdToTokenMap = this.getPartitionKeyRangeIdToTokenMap(collectionName); - return SessionContainer.getCombinedSessionTokenString(rangeIdToTokenMap); +// Licensed under the MIT license. +/** + * A call to open(), but mockable + * @internal + */ +const interactiveBrowserMockable = { + open, +}; +/** + * This MSAL client sets up a web server to listen for redirect callbacks, then calls to the MSAL's public application's `acquireTokenByDeviceCode` during `doGetToken` + * to trigger the authentication flow, and then respond based on the values obtained from the redirect callback + * @internal + */ +class MsalOpenBrowser extends MsalNode { + constructor(options) { + var _a, _b; + super(options); + this.loginHint = options.loginHint; + this.errorTemplate = (_a = options.browserCustomizationOptions) === null || _a === void 0 ? void 0 : _a.errorMessage; + this.successTemplate = (_b = options.browserCustomizationOptions) === null || _b === void 0 ? void 0 : _b.successMessage; + this.logger = credentialLogger("Node.js MSAL Open Browser"); } - remove(request) { - let collectionResourceId; - const resourceAddress = trimSlashes(request.resourceAddress); - const collectionName = getContainerLink(resourceAddress); - if (collectionName) { - collectionResourceId = this.collectionNameToCollectionResourceId.get(collectionName); - this.collectionNameToCollectionResourceId.delete(collectionName); + async doGetToken(scopes, options) { + var _a; + try { + const interactiveRequest = { + openBrowser: async (url) => { + await interactiveBrowserMockable.open(url, { wait: true, newInstance: true }); + }, + scopes, + authority: options === null || options === void 0 ? void 0 : options.authority, + claims: options === null || options === void 0 ? void 0 : options.claims, + correlationId: options === null || options === void 0 ? 
void 0 : options.correlationId, + loginHint: this.loginHint, + errorTemplate: this.errorTemplate, + successTemplate: this.successTemplate, + }; + if (hasNativeBroker() && this.enableBroker) { + this.logger.verbose("Authentication will resume through the broker"); + if (this.parentWindowHandle) { + interactiveRequest.windowHandle = Buffer.from(this.parentWindowHandle); + } + else { + // error should have been thrown from within the constructor of InteractiveBrowserCredential + this.logger.warning("Parent window handle is not specified for the broker. This may cause unexpected behavior. Please provide the parentWindowHandle."); + } + if (this.enableMsaPassthrough) { + ((_a = interactiveRequest.tokenQueryParameters) !== null && _a !== void 0 ? _a : (interactiveRequest.tokenQueryParameters = {}))["msal_request_type"] = + "consumer_passthrough"; + } + } + if (hasNativeBroker() && !this.enableBroker) { + this.logger.verbose("Authentication will resume normally without the broker, since it's not enabled"); + } + const result = await this.getApp("public", options === null || options === void 0 ? void 0 : options.enableCae).acquireTokenInteractive(interactiveRequest); + if (result.fromNativeBroker) { + this.logger.verbose(`This result is returned from native broker`); + } + return this.handleResult(scopes, this.clientId, result || undefined); } - if (collectionResourceId !== undefined) { - this.collectionResourceIdToSessionTokens.delete(collectionResourceId); + catch (err) { + throw this.handleError(scopes, err, options); } } - set(request, resHeaders) { - // TODO: we check the master logic a few different places. Might not need it. - if (!resHeaders || - SessionContainer.isReadingFromMaster(request.resourceType, request.operationType)) { - return; - } - const sessionTokenString = resHeaders[Constants$1.HttpHeaders.SessionToken]; - if (!sessionTokenString) { - return; - } - const containerName = this.getContainerName(request, resHeaders); - const ownerId = !request.isNameBased - ? request.resourceId - : resHeaders[Constants$1.HttpHeaders.OwnerId] || request.resourceId; - if (!ownerId) { - return; - } - if (containerName && this.validateOwnerID(ownerId)) { - if (!this.collectionResourceIdToSessionTokens.has(ownerId)) { - this.collectionResourceIdToSessionTokens.set(ownerId, new Map()); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const logger$3 = credentialLogger("InteractiveBrowserCredential"); +/** + * Enables authentication to Microsoft Entra ID inside of the web browser + * using the interactive login flow. + */ +class InteractiveBrowserCredential { + /** + * Creates an instance of InteractiveBrowserCredential with the details needed. + * + * This credential uses the [Authorization Code Flow](https://learn.microsoft.com/azure/active-directory/develop/v2-oauth2-auth-code-flow). + * On Node.js, it will open a browser window while it listens for a redirect response from the authentication service. + * On browsers, it authenticates via popups. The `loginStyle` optional parameter can be set to `redirect` to authenticate by redirecting the user to an Azure secure login page, which then will redirect the user back to the web application where the authentication started. + * + * For Node.js, if a `clientId` is provided, the Microsoft Entra application will need to be configured to have a "Mobile and desktop applications" redirect endpoint. 
+ * Follow our guide on [setting up Redirect URIs for Desktop apps that calls to web APIs](https://learn.microsoft.com/azure/active-directory/develop/scenario-desktop-app-registration#redirect-uris). + * + * @param options - Options for configuring the client which makes the authentication requests. + */ + constructor(options) { + var _a, _b, _c; + const redirectUri = typeof options.redirectUri === "function" + ? options.redirectUri() + : options.redirectUri || "http://localhost"; + this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + const ibcNodeOptions = options; + if ((_a = ibcNodeOptions === null || ibcNodeOptions === void 0 ? void 0 : ibcNodeOptions.brokerOptions) === null || _a === void 0 ? void 0 : _a.enabled) { + if (!((_b = ibcNodeOptions === null || ibcNodeOptions === void 0 ? void 0 : ibcNodeOptions.brokerOptions) === null || _b === void 0 ? void 0 : _b.parentWindowHandle)) { + throw new Error("In order to do WAM authentication, `parentWindowHandle` under `brokerOptions` is a required parameter"); } - if (!this.collectionNameToCollectionResourceId.has(containerName)) { - this.collectionNameToCollectionResourceId.set(containerName, ownerId); + else { + this.msalFlow = new MsalOpenBrowser(Object.assign(Object.assign({}, options), { tokenCredentialOptions: options, logger: logger$3, + redirectUri, browserCustomizationOptions: ibcNodeOptions === null || ibcNodeOptions === void 0 ? void 0 : ibcNodeOptions.browserCustomizationOptions, brokerOptions: { + enabled: true, + parentWindowHandle: ibcNodeOptions.brokerOptions.parentWindowHandle, + legacyEnableMsaPassthrough: (_c = ibcNodeOptions.brokerOptions) === null || _c === void 0 ? void 0 : _c.legacyEnableMsaPassthrough, + } })); } - const containerSessionContainer = this.collectionResourceIdToSessionTokens.get(ownerId); - SessionContainer.compareAndSetToken(sessionTokenString, containerSessionContainer); } - } - validateOwnerID(ownerId) { - // If ownerId contains exactly 8 bytes it represents a unique database+collection identifier. Otherwise it represents another resource - // The first 4 bytes are the database. The last 4 bytes are the collection. - // Cosmos rids potentially contain "-" which is an invalid character in the browser atob implementation - // See https://en.wikipedia.org/wiki/Base64#Filenames - return atob(ownerId.replace(/-/g, "/")).length === 8; - } - getPartitionKeyRangeIdToTokenMap(collectionName) { - let rangeIdToTokenMap = null; - if (collectionName && this.collectionNameToCollectionResourceId.has(collectionName)) { - rangeIdToTokenMap = this.collectionResourceIdToSessionTokens.get(this.collectionNameToCollectionResourceId.get(collectionName)); + else { + this.msalFlow = new MsalOpenBrowser(Object.assign(Object.assign({}, options), { tokenCredentialOptions: options, logger: logger$3, + redirectUri, browserCustomizationOptions: ibcNodeOptions === null || ibcNodeOptions === void 0 ? void 0 : ibcNodeOptions.browserCustomizationOptions })); } - return rangeIdToTokenMap; + this.disableAutomaticAuthentication = options === null || options === void 0 ? 
void 0 : options.disableAutomaticAuthentication; } - static getCombinedSessionTokenString(tokens) { - if (!tokens || tokens.size === 0) { - return SessionContainer.EMPTY_SESSION_TOKEN; - } - let result = ""; - for (const [range, token] of tokens.entries()) { - result += - range + - SessionContainer.SESSION_TOKEN_PARTITION_SPLITTER + - token.toString() + - SessionContainer.SESSION_TOKEN_SEPARATOR; - } - return result.slice(0, -1); + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * If the user provided the option `disableAutomaticAuthentication`, + * once the token can't be retrieved silently, + * this method won't attempt to request user interaction to retrieve the token. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { + newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$3); + const arrayScopes = ensureScopes(scopes); + return this.msalFlow.getToken(arrayScopes, Object.assign(Object.assign({}, newOptions), { disableAutomaticAuthentication: this.disableAutomaticAuthentication })); + }); } - static compareAndSetToken(newTokenString, containerSessionTokens) { - if (!newTokenString) { - return; - } - const partitionsParts = newTokenString.split(SessionContainer.SESSION_TOKEN_SEPARATOR); - for (const partitionPart of partitionsParts) { - const newTokenParts = partitionPart.split(SessionContainer.SESSION_TOKEN_PARTITION_SPLITTER); - if (newTokenParts.length !== 2) { - return; - } - const range = newTokenParts[0]; - const newToken = VectorSessionToken.create(newTokenParts[1]); - const tokenForRange = !containerSessionTokens.get(range) - ? newToken - : containerSessionTokens.get(range).merge(newToken); - containerSessionTokens.set(range, tokenForRange); - } + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * If the token can't be retrieved silently, this method will require user interaction to retrieve the token. + * + * On Node.js, this credential has [Proof Key for Code Exchange (PKCE)](https://datatracker.ietf.org/doc/html/rfc7636) enabled by default. + * PKCE is a security feature that mitigates authentication code interception attacks. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. 
+ */ + async authenticate(scopes, options = {}) { + return tracingClient.withSpan(`${this.constructor.name}.authenticate`, options, async (newOptions) => { + const arrayScopes = ensureScopes(scopes); + await this.msalFlow.getToken(arrayScopes, newOptions); + return this.msalFlow.getActiveAccount(); + }); } - // TODO: have a assert if the type doesn't mastch known types - static isReadingFromMaster(resourceType, operationType) { - if (resourceType === Constants$1.Path.OffersPathSegment || - resourceType === Constants$1.Path.DatabasesPathSegment || - resourceType === Constants$1.Path.UsersPathSegment || - resourceType === Constants$1.Path.PermissionsPathSegment || - resourceType === Constants$1.Path.TopologyPathSegment || - resourceType === Constants$1.Path.DatabaseAccountPathSegment || - resourceType === Constants$1.Path.PartitionKeyRangesPathSegment || - (resourceType === Constants$1.Path.CollectionsPathSegment && - operationType === exports.OperationType.Query)) { - return true; - } - return false; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * MSAL device code client. Calls to the MSAL's public application's `acquireTokenByDeviceCode` during `doGetToken`. + * @internal + */ +class MsalDeviceCode extends MsalNode { + constructor(options) { + super(options); + this.userPromptCallback = options.userPromptCallback; } - getContainerName(request, headers) { - let ownerFullName = headers[Constants$1.HttpHeaders.OwnerFullName]; - if (!ownerFullName) { - ownerFullName = trimSlashes(request.resourceAddress); + async doGetToken(scopes, options) { + try { + const requestOptions = { + deviceCodeCallback: this.userPromptCallback, + scopes, + cancel: false, + correlationId: options === null || options === void 0 ? void 0 : options.correlationId, + authority: options === null || options === void 0 ? void 0 : options.authority, + claims: options === null || options === void 0 ? void 0 : options.claims, + }; + const promise = this.getApp("public", options === null || options === void 0 ? void 0 : options.enableCae).acquireTokenByDeviceCode(requestOptions); + const deviceResponse = await this.withCancellation(promise, options === null || options === void 0 ? void 0 : options.abortSignal, () => { + requestOptions.cancel = true; + }); + return this.handleResult(scopes, this.clientId, deviceResponse || undefined); + } + catch (error) { + throw this.handleError(scopes, error, options); } - return getContainerLink(ownerFullName); } } -SessionContainer.EMPTY_SESSION_TOKEN = ""; -SessionContainer.SESSION_TOKEN_SEPARATOR = ","; -SessionContainer.SESSION_TOKEN_PARTITION_SPLITTER = ":"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -function checkURL(testString) { - return new URL(testString); -} -function sanitizeEndpoint(url) { - return new URL(url).href.replace(/\/$/, ""); +const logger$2 = credentialLogger("DeviceCodeCredential"); +/** + * Method that logs the user code from the DeviceCodeCredential. + * @param deviceCodeInfo - The device code. + */ +function defaultDeviceCodePromptCallback(deviceCodeInfo) { + console.log(deviceCodeInfo.message); } - -// Copyright (c) Microsoft Corporation. /** - * Implementation of DiagnosticWriter, which uses \@azure/logger to write - * diagnostics. - * @hidden + * Enables authentication to Microsoft Entra ID using a device code + * that the user can enter into https://microsoft.com/devicelogin. 
*/ -class LogDiagnosticWriter { - constructor() { - this.logger = logger$5.createClientLogger("CosmosDBDiagnostics"); +class DeviceCodeCredential { + /** + * Creates an instance of DeviceCodeCredential with the details needed + * to initiate the device code authorization flow with Microsoft Entra ID. + * + * A message will be logged, giving users a code that they can use to authenticate once they go to https://microsoft.com/devicelogin + * + * Developers can configure how this message is shown by passing a custom `userPromptCallback`: + * + * ```js + * const credential = new DeviceCodeCredential({ + * tenantId: env.AZURE_TENANT_ID, + * clientId: env.AZURE_CLIENT_ID, + * userPromptCallback: (info) => { + * console.log("CUSTOMIZED PROMPT CALLBACK", info.message); + * } + * }); + * ``` + * + * @param options - Options for configuring the client which makes the authentication requests. + */ + constructor(options) { + this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); + this.msalFlow = new MsalDeviceCode(Object.assign(Object.assign({}, options), { logger: logger$2, userPromptCallback: (options === null || options === void 0 ? void 0 : options.userPromptCallback) || defaultDeviceCodePromptCallback, tokenCredentialOptions: options || {} })); + this.disableAutomaticAuthentication = options === null || options === void 0 ? void 0 : options.disableAutomaticAuthentication; } - async write(diagnosticsData) { - this.logger.verbose(diagnosticsData); + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * If the user provided the option `disableAutomaticAuthentication`, + * once the token can't be retrieved silently, + * this method won't attempt to request user interaction to retrieve the token. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { + newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$2); + const arrayScopes = ensureScopes(scopes); + return this.msalFlow.getToken(arrayScopes, Object.assign(Object.assign({}, newOptions), { disableAutomaticAuthentication: this.disableAutomaticAuthentication })); + }); + } + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * If the token can't be retrieved silently, this method will require user interaction to retrieve the token. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + async authenticate(scopes, options = {}) { + return tracingClient.withSpan(`${this.constructor.name}.authenticate`, options, async (newOptions) => { + const arrayScopes = Array.isArray(scopes) ? 
scopes : [scopes]; + await this.msalFlow.getToken(arrayScopes, newOptions); + return this.msalFlow.getActiveAccount(); + }); } } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Implementation of a no-op DiagnosticWriter. - * @hidden + * This MSAL client sets up a web server to listen for redirect callbacks, then calls to the MSAL's public application's `acquireTokenByDeviceCode` during `doGetToken` + * to trigger the authentication flow, and then respond based on the values obtained from the redirect callback + * @internal */ -class NoOpDiagnosticWriter { - async write(_diagnosticsData) { - // No op +class MsalAuthorizationCode extends MsalNode { + constructor(options) { + super(options); + this.logger = credentialLogger("Node.js MSAL Authorization Code"); + this.redirectUri = options.redirectUri; + this.authorizationCode = options.authorizationCode; + if (options.clientSecret) { + this.msalConfig.auth.clientSecret = options.clientSecret; + } + } + async getAuthCodeUrl(options) { + await this.init(); + return this.getApp("confidentialFirst", options.enableCae).getAuthCodeUrl({ + scopes: options.scopes, + redirectUri: options.redirectUri, + }); + } + async doGetToken(scopes, options) { + try { + const result = await this.getApp("confidentialFirst", options === null || options === void 0 ? void 0 : options.enableCae).acquireTokenByCode({ + scopes, + redirectUri: this.redirectUri, + code: this.authorizationCode, + correlationId: options === null || options === void 0 ? void 0 : options.correlationId, + authority: options === null || options === void 0 ? void 0 : options.authority, + claims: options === null || options === void 0 ? void 0 : options.claims, + }); + // The Client Credential flow does not return an account, + // so each time getToken gets called, we will have to acquire a new token through the service. + return this.handleResult(scopes, this.clientId, result || undefined); + } + catch (err) { + throw this.handleError(scopes, err, options); + } } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -class DefaultDiagnosticFormatter { - format(cosmosDiagnostic) { - return JSON.stringify(cosmosDiagnostic); +const logger$1 = credentialLogger("AuthorizationCodeCredential"); +/** + * Enables authentication to Microsoft Entra ID using an authorization code + * that was obtained through the authorization code flow, described in more detail + * in the Microsoft Entra ID documentation: + * + * https://learn.microsoft.com/azure/active-directory/develop/v2-oauth2-auth-code-flow + */ +class AuthorizationCodeCredential { + /** + * @hidden + * @internal + */ + constructor(tenantId, clientId, clientSecretOrAuthorizationCode, authorizationCodeOrRedirectUri, redirectUriOrOptions, options) { + checkTenantId(logger$1, tenantId); + let clientSecret = clientSecretOrAuthorizationCode; + if (typeof redirectUriOrOptions === "string") { + // the clientId+clientSecret constructor + this.authorizationCode = authorizationCodeOrRedirectUri; + this.redirectUri = redirectUriOrOptions; + // in this case, options are good as they come + } + else { + // clientId only + this.authorizationCode = clientSecretOrAuthorizationCode; + this.redirectUri = authorizationCodeOrRedirectUri; + clientSecret = undefined; + options = redirectUriOrOptions; + } + // TODO: Validate tenant if provided + this.tenantId = tenantId; + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? 
void 0 : options.additionallyAllowedTenants); + this.msalFlow = new MsalAuthorizationCode(Object.assign(Object.assign({}, options), { clientSecret, + clientId, + tenantId, tokenCredentialOptions: options || {}, logger: logger$1, redirectUri: this.redirectUri, authorizationCode: this.authorizationCode })); + } + /** + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { + const tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds); + newOptions.tenantId = tenantId; + const arrayScopes = ensureScopes(scopes); + return this.msalFlow.getToken(arrayScopes, Object.assign(Object.assign({}, newOptions), { disableAutomaticAuthentication: this.disableAutomaticAuthentication })); + }); } } // Copyright (c) Microsoft Corporation. -const uuid = uuid$3.v4; -const logger = logger$5.createClientLogger("ClientContext"); -const QueryJsonContentType = "application/query+json"; -const HttpHeaders = Constants$1.HttpHeaders; +// Licensed under the MIT license. /** - * @hidden - * @hidden + * MSAL on behalf of flow. Calls to MSAL's confidential application's `acquireTokenOnBehalfOf` during `doGetToken`. + * @internal */ -class ClientContext { - constructor(cosmosClientOptions, globalEndpointManager, clientConfig, diagnosticLevel) { - this.cosmosClientOptions = cosmosClientOptions; - this.globalEndpointManager = globalEndpointManager; - this.clientConfig = clientConfig; - this.diagnosticLevel = diagnosticLevel; - this.connectionPolicy = cosmosClientOptions.connectionPolicy; - this.sessionContainer = new SessionContainer(); - this.partitionKeyDefinitionCache = {}; - this.pipeline = null; - if (cosmosClientOptions.aadCredentials) { - this.pipeline = coreRestPipeline.createEmptyPipeline(); - const hrefEndpoint = sanitizeEndpoint(cosmosClientOptions.endpoint); - const scope = `${hrefEndpoint}/.default`; - this.pipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ - credential: cosmosClientOptions.aadCredentials, - scopes: scope, - challengeCallbacks: { - async authorizeRequest({ request, getAccessToken }) { - const tokenResponse = await getAccessToken([scope], {}); - const AUTH_PREFIX = `type=aad&ver=1.0&sig=`; - const authorizationToken = `${AUTH_PREFIX}${tokenResponse.token}`; - request.headers.set("Authorization", authorizationToken); - }, - }, - })); +class MsalOnBehalfOf extends MsalNode { + constructor(options) { + super(options); + this.logger.info("Initialized MSAL's On-Behalf-Of flow"); + this.requiresConfidential = true; + this.userAssertionToken = options.userAssertionToken; + this.certificatePath = options.certificatePath; + this.sendCertificateChain = options.sendCertificateChain; + this.clientSecret = options.clientSecret; + } + // Changing the MSAL configuration asynchronously + async init(options) { + if (this.certificatePath) { + try { + const parts = await parseCertificate({ certificatePath: this.certificatePath }, this.sendCertificateChain); + this.msalConfig.auth.clientCertificate = { + thumbprint: parts.thumbprint, + privateKey: parts.certificateContents, + 
x5c: parts.x5c, + }; + } + catch (error) { + this.logger.info(formatError("", error)); + throw error; + } } - this.initializeDiagnosticSettings(diagnosticLevel); + else { + this.msalConfig.auth.clientSecret = this.clientSecret; + } + return super.init(options); } - /** @hidden */ - async read({ path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) { + async doGetToken(scopes, options = {}) { try { - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.get, path, operationType: exports.OperationType.Read, resourceId, - options, - resourceType, - partitionKey }); - diagnosticNode.addData({ - operationType: exports.OperationType.Read, - resourceType, + const result = await this.getApp("confidential", options.enableCae).acquireTokenOnBehalfOf({ + scopes, + correlationId: options.correlationId, + authority: options.authority, + claims: options.claims, + oboAssertion: this.userAssertionToken, }); - request.headers = await this.buildHeaders(request); - this.applySessionToken(request); - // read will use ReadEndpoint since it uses GET operation - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); - this.captureSessionToken(undefined, path, exports.OperationType.Read, response.headers); - return response; + return this.handleResult(scopes, this.clientId, result || undefined); } catch (err) { - this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); - throw err; + throw this.handleError(scopes, err, options); } } - async queryFeed({ path, resourceType, resourceId, resultFn, query, options, diagnosticNode, partitionKeyRangeId, partitionKey, startEpk, endEpk, }) { - // Query operations will use ReadEndpoint even though it uses - // GET(for queryFeed) and POST(for regular query operations) - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.get, path, operationType: exports.OperationType.Query, partitionKeyRangeId, - resourceId, - resourceType, - options, body: query, partitionKey }); - diagnosticNode.addData({ - operationType: exports.OperationType.Query, - resourceType, - }); - const requestId = uuid(); - if (query !== undefined) { - request.method = exports.HTTPMethod.post; - } - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - request.headers = await this.buildHeaders(request); - if (startEpk !== undefined && endEpk !== undefined) { - request.headers[HttpHeaders.StartEpk] = startEpk; - request.headers[HttpHeaders.EndEpk] = endEpk; - request.headers[HttpHeaders.ReadFeedKeyType] = "EffectivePartitionKeyRange"; - } - if (query !== undefined) { - request.headers[HttpHeaders.IsQuery] = "true"; - request.headers[HttpHeaders.ContentType] = QueryJsonContentType; - if (typeof query === "string") { - request.body = { query }; // Converts query text to query object. - } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const credentialName = "OnBehalfOfCredential"; +const logger = credentialLogger(credentialName); +/** + * Enables authentication to Microsoft Entra ID using the [On Behalf Of flow](https://learn.microsoft.com/azure/active-directory/develop/v2-oauth2-on-behalf-of-flow). 
+ */ +class OnBehalfOfCredential { + constructor(options) { + this.options = options; + const { clientSecret } = options; + const { certificatePath } = options; + const { tenantId, clientId, userAssertionToken, additionallyAllowedTenants: additionallyAllowedTenantIds, } = options; + if (!tenantId || !clientId || !(clientSecret || certificatePath) || !userAssertionToken) { + throw new Error(`${credentialName}: tenantId, clientId, clientSecret (or certificatePath) and userAssertionToken are required parameters.`); } - this.applySessionToken(request); - logger.info("query " + - requestId + - " started" + - (request.partitionKeyRangeId ? " pkrid: " + request.partitionKeyRangeId : "")); - logger.verbose(request); - const start = Date.now(); - const response = await RequestHandler.request(request, diagnosticNode); - logger.info("query " + requestId + " finished - " + (Date.now() - start) + "ms"); - this.captureSessionToken(undefined, path, exports.OperationType.Query, response.headers); - return this.processQueryFeedResponse(response, !!query, resultFn); - } - async getQueryPlan(path, resourceType, resourceId, query, options = {}, diagnosticNode) { - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, path, operationType: exports.OperationType.Read, resourceId, - resourceType, - options, body: query }); - diagnosticNode.addData({ - operationType: exports.OperationType.Read, - resourceType, - }); - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - request.headers = await this.buildHeaders(request); - request.headers[HttpHeaders.IsQueryPlan] = "True"; - request.headers[HttpHeaders.QueryVersion] = "1.4"; - request.headers[HttpHeaders.SupportedQueryFeatures] = - "NonValueAggregate, Aggregate, Distinct, MultipleOrderBy, OffsetAndLimit, OrderBy, Top, CompositeAggregate, GroupBy, MultipleAggregates"; - request.headers[HttpHeaders.ContentType] = QueryJsonContentType; - if (typeof query === "string") { - request.body = { query }; // Converts query text to query object. 
- } - this.applySessionToken(request); - const response = await RequestHandler.request(request, diagnosticNode); - this.captureSessionToken(undefined, path, exports.OperationType.Query, response.headers); - return response; - } - queryPartitionKeyRanges(collectionLink, query, options) { - const path = getPathFromLink(collectionLink, exports.ResourceType.pkranges); - const id = getIdFromLink(collectionLink); - const cb = async (diagNode, innerOptions) => { - const response = await this.queryFeed({ - path, - resourceType: exports.ResourceType.pkranges, - resourceId: id, - resultFn: (result) => result.PartitionKeyRanges, - query, - options: innerOptions, - diagnosticNode: diagNode, - }); - return response; - }; - return new QueryIterator(this, query, options, cb); - } - async delete({ path, resourceType, resourceId, options = {}, partitionKey, method = exports.HTTPMethod.delete, diagnosticNode, }) { - try { - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: method, operationType: exports.OperationType.Delete, path, - resourceType, - options, - resourceId, - partitionKey }); - diagnosticNode.addData({ - operationType: exports.OperationType.Delete, - resourceType, - }); - request.headers = await this.buildHeaders(request); - this.applySessionToken(request); - // deleteResource will use WriteEndpoint since it uses DELETE operation - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); - if (parseLink(path).type !== "colls") { - this.captureSessionToken(undefined, path, exports.OperationType.Delete, response.headers); - } - else { - this.clearSessionToken(path); - } - return response; - } - catch (err) { - this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); - throw err; - } - } - async patch({ body, path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) { - try { - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.patch, operationType: exports.OperationType.Patch, path, - resourceType, - body, - resourceId, - options, - partitionKey }); - diagnosticNode.addData({ - operationType: exports.OperationType.Patch, - resourceType, - }); - request.headers = await this.buildHeaders(request); - this.applySessionToken(request); - // patch will use WriteEndpoint - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); - this.captureSessionToken(undefined, path, exports.OperationType.Patch, response.headers); - return response; - } - catch (err) { - this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); - throw err; - } - } - async create({ body, path, resourceType, resourceId, diagnosticNode, options = {}, partitionKey, }) { - try { - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Create, path, - resourceType, - resourceId, - body, - options, - partitionKey }); - diagnosticNode.addData({ - operationType: exports.OperationType.Create, - resourceType, - }); - request.headers = await 
this.buildHeaders(request); - // create will use WriteEndpoint since it uses POST operation - this.applySessionToken(request); - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); - this.captureSessionToken(undefined, path, exports.OperationType.Create, response.headers); - return response; - } - catch (err) { - this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); - throw err; - } - } - processQueryFeedResponse(res, isQuery, resultFn) { - if (isQuery) { - return { - result: resultFn(res.result), - headers: res.headers, - code: res.code, - }; - } - else { - const newResult = resultFn(res.result).map((body) => body); - return { - result: newResult, - headers: res.headers, - code: res.code, - }; - } - } - applySessionToken(requestContext) { - const request = this.getSessionParams(requestContext.path); - if (requestContext.headers && requestContext.headers[HttpHeaders.SessionToken]) { - return; - } - const sessionConsistency = requestContext.headers[HttpHeaders.ConsistencyLevel]; - if (!sessionConsistency) { - return; - } - if (sessionConsistency !== exports.ConsistencyLevel.Session) { - return; - } - if (request.resourceAddress) { - const sessionToken = this.sessionContainer.get(request); - if (sessionToken) { - requestContext.headers[HttpHeaders.SessionToken] = sessionToken; - } - } - } - async replace({ body, path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) { - try { - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.put, operationType: exports.OperationType.Replace, path, - resourceType, - body, - resourceId, - options, - partitionKey }); - diagnosticNode.addData({ - operationType: exports.OperationType.Replace, - resourceType, - }); - request.headers = await this.buildHeaders(request); - this.applySessionToken(request); - // replace will use WriteEndpoint since it uses PUT operation - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); - this.captureSessionToken(undefined, path, exports.OperationType.Replace, response.headers); - return response; - } - catch (err) { - this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); - throw err; - } - } - async upsert({ body, path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) { - try { - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Upsert, path, - resourceType, - body, - resourceId, - options, - partitionKey }); - diagnosticNode.addData({ - operationType: exports.OperationType.Upsert, - resourceType, - }); - request.headers = await this.buildHeaders(request); - request.headers[HttpHeaders.IsUpsert] = true; - this.applySessionToken(request); - // upsert will use WriteEndpoint since it uses POST operation - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - const response = await executePlugins(diagnosticNode, request, RequestHandler.request, 
exports.PluginOn.operation); - this.captureSessionToken(undefined, path, exports.OperationType.Upsert, response.headers); - return response; - } - catch (err) { - this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); - throw err; - } - } - async execute({ sprocLink, params, options = {}, partitionKey, diagnosticNode, }) { - // Accept a single parameter or an array of parameters. - // Didn't add type annotation for this because we should legacy this behavior - if (params !== null && params !== undefined && !Array.isArray(params)) { - params = [params]; - } - const path = getPathFromLink(sprocLink); - const id = getIdFromLink(sprocLink); - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Execute, path, resourceType: exports.ResourceType.sproc, options, resourceId: id, body: params, partitionKey }); - diagnosticNode.addData({ - operationType: exports.OperationType.Execute, - resourceType: exports.ResourceType.sproc, - }); - request.headers = await this.buildHeaders(request); - // executeStoredProcedure will use WriteEndpoint since it uses POST operation - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); - return response; + this.tenantId = tenantId; + this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(additionallyAllowedTenantIds); + this.msalFlow = new MsalOnBehalfOf(Object.assign(Object.assign({}, this.options), { logger, tokenCredentialOptions: this.options })); } /** - * Gets the Database account information. - * @param options - `urlConnection` in the options is the endpoint url whose database account needs to be retrieved. - * If not present, current client's url will be used. + * Authenticates with Microsoft Entra ID and returns an access token if successful. + * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure the underlying network requests. 
*/ - async getDatabaseAccount(diagnosticNode, options = {}) { - const endpoint = options.urlConnection || this.cosmosClientOptions.endpoint; - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { endpoint, method: exports.HTTPMethod.get, operationType: exports.OperationType.Read, path: "", resourceType: exports.ResourceType.none, options }); - diagnosticNode.addData({ - operationType: exports.OperationType.Read, - resourceType: exports.ResourceType.none, - }); - request.headers = await this.buildHeaders(request); - // await options.beforeOperation({ endpoint, request, headers: requestHeaders }); - const { result, headers, code, substatus, diagnostics } = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); - const databaseAccount = new DatabaseAccount(result, headers); - return { - result: databaseAccount, - headers, - diagnostics, - code: code, - substatus: substatus, - }; - } - getWriteEndpoint(diagnosticNode) { - return this.globalEndpointManager.getWriteEndpoint(diagnosticNode); - } - getReadEndpoint(diagnosticNode) { - return this.globalEndpointManager.getReadEndpoint(diagnosticNode); - } - getWriteEndpoints() { - return this.globalEndpointManager.getWriteEndpoints(); - } - getReadEndpoints() { - return this.globalEndpointManager.getReadEndpoints(); - } - async batch({ body, path, partitionKey, resourceId, options = {}, diagnosticNode, }) { - try { - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Batch, path, - body, resourceType: exports.ResourceType.item, resourceId, - options, - partitionKey }); - diagnosticNode.addData({ - operationType: exports.OperationType.Batch, - resourceType: exports.ResourceType.item, - }); - request.headers = await this.buildHeaders(request); - request.headers[HttpHeaders.IsBatchRequest] = true; - request.headers[HttpHeaders.IsBatchAtomic] = true; - this.applySessionToken(request); - request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); - this.captureSessionToken(undefined, path, exports.OperationType.Batch, response.headers); - response.diagnostics = diagnosticNode.toDiagnostic(this.getClientConfig()); - return response; - } - catch (err) { - this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); - throw err; - } - } - async bulk({ body, path, partitionKeyRangeId, resourceId, bulkOptions = {}, options = {}, diagnosticNode, }) { - try { - const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: exports.HTTPMethod.post, operationType: exports.OperationType.Batch, path, - body, resourceType: exports.ResourceType.item, resourceId, - options }); - diagnosticNode.addData({ - operationType: exports.OperationType.Batch, - resourceType: exports.ResourceType.item, - }); - request.headers = await this.buildHeaders(request); - request.headers[HttpHeaders.IsBatchRequest] = true; - request.headers[HttpHeaders.PartitionKeyRangeID] = partitionKeyRangeId; - request.headers[HttpHeaders.IsBatchAtomic] = false; - request.headers[HttpHeaders.BatchContinueOnError] = bulkOptions.continueOnError || false; - this.applySessionToken(request); - request.endpoint = await 
this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType); - const response = await executePlugins(diagnosticNode, request, RequestHandler.request, exports.PluginOn.operation); - this.captureSessionToken(undefined, path, exports.OperationType.Batch, response.headers); - return response; - } - catch (err) { - this.captureSessionToken(err, path, exports.OperationType.Upsert, err.headers); - throw err; - } - } - captureSessionToken(err, path, operationType, resHeaders) { - const request = this.getSessionParams(path); - request.operationType = operationType; - if (!err || - (!this.isMasterResource(request.resourceType) && - (err.code === StatusCodes.PreconditionFailed || - err.code === StatusCodes.Conflict || - (err.code === StatusCodes.NotFound && - err.substatus !== SubStatusCodes.ReadSessionNotAvailable)))) { - this.sessionContainer.set(request, resHeaders); - } - } - clearSessionToken(path) { - const request = this.getSessionParams(path); - this.sessionContainer.remove(request); - } - recordDiagnostics(diagnostic) { - const formatted = this.diagnosticFormatter.format(diagnostic); - this.diagnosticWriter.write(formatted); - } - initializeDiagnosticSettings(diagnosticLevel) { - this.diagnosticFormatter = new DefaultDiagnosticFormatter(); - switch (diagnosticLevel) { - case exports.CosmosDbDiagnosticLevel.info: - this.diagnosticWriter = new NoOpDiagnosticWriter(); - break; - default: - this.diagnosticWriter = new LogDiagnosticWriter(); - } - } - // TODO: move - getSessionParams(resourceLink) { - const resourceId = null; - let resourceAddress = null; - const parserOutput = parseLink(resourceLink); - resourceAddress = parserOutput.objectBody.self; - const resourceType = parserOutput.type; - return { - resourceId, - resourceAddress, - resourceType, - isNameBased: true, - }; - } - isMasterResource(resourceType) { - if (resourceType === Constants$1.Path.OffersPathSegment || - resourceType === Constants$1.Path.DatabasesPathSegment || - resourceType === Constants$1.Path.UsersPathSegment || - resourceType === Constants$1.Path.PermissionsPathSegment || - resourceType === Constants$1.Path.TopologyPathSegment || - resourceType === Constants$1.Path.DatabaseAccountPathSegment || - resourceType === Constants$1.Path.PartitionKeyRangesPathSegment || - resourceType === Constants$1.Path.CollectionsPathSegment) { - return true; - } - return false; - } - buildHeaders(requestContext) { - return getHeaders({ - clientOptions: this.cosmosClientOptions, - defaultHeaders: Object.assign(Object.assign({}, this.cosmosClientOptions.defaultHeaders), requestContext.options.initialHeaders), - verb: requestContext.method, - path: requestContext.path, - resourceId: requestContext.resourceId, - resourceType: requestContext.resourceType, - options: requestContext.options, - partitionKeyRangeId: requestContext.partitionKeyRangeId, - useMultipleWriteLocations: this.connectionPolicy.useMultipleWriteLocations, - partitionKey: requestContext.partitionKey !== undefined - ? 
convertToInternalPartitionKey(requestContext.partitionKey) - : undefined, // TODO: Move this check from here to PartitionKey + async getToken(scopes, options = {}) { + return tracingClient.withSpan(`${credentialName}.getToken`, options, async (newOptions) => { + newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger); + const arrayScopes = ensureScopes(scopes); + return this.msalFlow.getToken(arrayScopes, newOptions); }); } - /** - * Returns collection of properties which are derived from the context for Request Creation. - * These properties have client wide scope, as opposed to request specific scope. - * @returns - */ - getContextDerivedPropsForRequestCreation() { - return { - globalEndpointManager: this.globalEndpointManager, - requestAgent: this.cosmosClientOptions.agent, - connectionPolicy: this.connectionPolicy, - client: this, - plugins: this.cosmosClientOptions.plugins, - pipeline: this.pipeline, - }; - } - getClientConfig() { - return this.clientConfig; - } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * @hidden + * Returns a new instance of the {@link DefaultAzureCredential}. */ -function getUserAgent(suffix) { - const ua = `${universalUserAgent.getUserAgent()} ${Constants$1.SDKName}/${Constants$1.SDKVersion}`; - if (suffix) { - return ua + " " + suffix; - } - return ua; +function getDefaultAzureCredential() { + return new DefaultAzureCredential(); } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function isNonEmptyString(variable) { - return typeof variable === "string" && variable.trim().length > 0; -} +exports.AggregateAuthenticationError = AggregateAuthenticationError; +exports.AggregateAuthenticationErrorName = AggregateAuthenticationErrorName; +exports.AuthenticationError = AuthenticationError; +exports.AuthenticationErrorName = AuthenticationErrorName; +exports.AuthenticationRequiredError = AuthenticationRequiredError; +exports.AuthorizationCodeCredential = AuthorizationCodeCredential; +exports.AzureCliCredential = AzureCliCredential; +exports.AzureDeveloperCliCredential = AzureDeveloperCliCredential; +exports.AzurePowerShellCredential = AzurePowerShellCredential; +exports.ChainedTokenCredential = ChainedTokenCredential; +exports.ClientAssertionCredential = ClientAssertionCredential; +exports.ClientCertificateCredential = ClientCertificateCredential; +exports.ClientSecretCredential = ClientSecretCredential; +exports.CredentialUnavailableError = CredentialUnavailableError; +exports.CredentialUnavailableErrorName = CredentialUnavailableErrorName; +exports.DefaultAzureCredential = DefaultAzureCredential; +exports.DeviceCodeCredential = DeviceCodeCredential; +exports.EnvironmentCredential = EnvironmentCredential; +exports.InteractiveBrowserCredential = InteractiveBrowserCredential; +exports.ManagedIdentityCredential = ManagedIdentityCredential; +exports.OnBehalfOfCredential = OnBehalfOfCredential; +exports.UsernamePasswordCredential = UsernamePasswordCredential; +exports.VisualStudioCodeCredential = VisualStudioCodeCredential; +exports.WorkloadIdentityCredential = WorkloadIdentityCredential; +exports.deserializeAuthenticationRecord = deserializeAuthenticationRecord; +exports.getDefaultAzureCredential = getDefaultAzureCredential; +exports.logger = logger$n; +exports.serializeAuthenticationRecord = serializeAuthenticationRecord; +exports.useIdentityPlugin = useIdentityPlugin; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 3233: +/***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var os = __nccwpck_require__(22037); +var util = __nccwpck_require__(73837); + +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } + +var util__default = /*#__PURE__*/_interopDefaultLegacy(util); // Copyright (c) Microsoft Corporation. -const DefaultDiagnosticLevelValue = exports.CosmosDbDiagnosticLevel.info; -const diagnosticLevelFromEnv = (typeof process !== "undefined" && - process.env && - process.env[Constants$1.CosmosDbDiagnosticLevelEnvVarName]) || - undefined; -const acceptableDiagnosticLevelValues = Object.values(exports.CosmosDbDiagnosticLevel).map((x) => x.toString()); -let cosmosDiagnosticLevel; -if (isNonEmptyString(diagnosticLevelFromEnv)) { - // avoid calling setDiagnosticLevel because we don't want a mis-set environment variable to crash - if (isCosmosDiagnosticLevel(diagnosticLevelFromEnv)) { - setDiagnosticLevel(diagnosticLevelFromEnv); - } - else { - console.error(`${Constants$1.CosmosDbDiagnosticLevelEnvVarName} set to unknown diagnostic level '${diagnosticLevelFromEnv}'; Setting Cosmos Db diagnostic level to info. Acceptable values: ${acceptableDiagnosticLevelValues.join(", ")}.`); - } -} -function setDiagnosticLevel(level) { - if (level && !isCosmosDiagnosticLevel(level)) { - throw new Error(`Unknown diagnostic level '${level}'. Acceptable values: ${acceptableDiagnosticLevelValues.join(",")}`); - } - cosmosDiagnosticLevel = level; -} -function getDiagnosticLevelFromEnvironment() { - return cosmosDiagnosticLevel; -} -function isCosmosDiagnosticLevel(diagnosticLevel) { - return acceptableDiagnosticLevelValues.includes(diagnosticLevel); -} -function determineDiagnosticLevel(diagnosticLevelFromClientConfig, diagnosticLevelFromEnvironment) { - const diagnosticLevelFromEnvOrClient = diagnosticLevelFromEnvironment !== null && diagnosticLevelFromEnvironment !== void 0 ? diagnosticLevelFromEnvironment : diagnosticLevelFromClientConfig; // Diagnostic Setting from environment gets first priority. - return diagnosticLevelFromEnvOrClient !== null && diagnosticLevelFromEnvOrClient !== void 0 ? diagnosticLevelFromEnvOrClient : DefaultDiagnosticLevelValue; // Diagnostic Setting supplied in Client config gets second priority. +function log(message, ...args) { + process.stderr.write(`${util__default["default"].format(message, ...args)}${os.EOL}`); } // Copyright (c) Microsoft Corporation. -/** - * @hidden - * This internal class implements the logic for endpoint management for geo-replicated database accounts. - */ -class GlobalEndpointManager { - /** - * @param options - The document client instance. - * @internal - */ - constructor(options, readDatabaseAccount) { - this.readDatabaseAccount = readDatabaseAccount; - this.writeableLocations = []; - this.readableLocations = []; - this.unavailableReadableLocations = []; - this.unavailableWriteableLocations = []; - this.options = options; - this.defaultEndpoint = options.endpoint; - this.enableEndpointDiscovery = options.connectionPolicy.enableEndpointDiscovery; - this.isRefreshing = false; - this.preferredLocations = this.options.connectionPolicy.preferredLocations; - this.preferredLocationsCount = this.preferredLocations ? this.preferredLocations.length : 0; - } - /** - * Gets the current read endpoint from the endpoint cache. 
- */ - async getReadEndpoint(diagnosticNode) { - return this.resolveServiceEndpoint(diagnosticNode, exports.ResourceType.item, exports.OperationType.Read); - } - /** - * Gets the current write endpoint from the endpoint cache. - */ - async getWriteEndpoint(diagnosticNode) { - return this.resolveServiceEndpoint(diagnosticNode, exports.ResourceType.item, exports.OperationType.Replace); - } - async getReadEndpoints() { - return this.readableLocations.map((loc) => loc.databaseAccountEndpoint); - } - async getWriteEndpoints() { - return this.writeableLocations.map((loc) => loc.databaseAccountEndpoint); - } - async markCurrentLocationUnavailableForRead(diagnosticNode, endpoint) { - await this.refreshEndpointList(diagnosticNode); - const location = this.readableLocations.find((loc) => loc.databaseAccountEndpoint === endpoint); - if (location) { - location.unavailable = true; - location.lastUnavailabilityTimestampInMs = Date.now(); - this.unavailableReadableLocations.push(location); - } - } - async markCurrentLocationUnavailableForWrite(diagnosticNode, endpoint) { - await this.refreshEndpointList(diagnosticNode); - const location = this.writeableLocations.find((loc) => loc.databaseAccountEndpoint === endpoint); - if (location) { - location.unavailable = true; - location.lastUnavailabilityTimestampInMs = Date.now(); - this.unavailableWriteableLocations.push(location); - } - } - canUseMultipleWriteLocations(resourceType, operationType) { - let canUse = this.options.connectionPolicy.useMultipleWriteLocations; - if (resourceType) { - canUse = - canUse && - (resourceType === exports.ResourceType.item || - (resourceType === exports.ResourceType.sproc && operationType === exports.OperationType.Execute)); - } - return canUse; - } - async resolveServiceEndpoint(diagnosticNode, resourceType, operationType, startServiceEndpointIndex = 0 // Represents the starting index for selecting servers. - ) { - // If endpoint discovery is disabled, always use the user provided endpoint - if (!this.options.connectionPolicy.enableEndpointDiscovery) { - diagnosticNode.addData({ readFromCache: true }, "default_endpoint"); - diagnosticNode.recordEndpointResolution(this.defaultEndpoint); - return this.defaultEndpoint; - } - // If getting the database account, always use the user provided endpoint - if (resourceType === exports.ResourceType.none) { - diagnosticNode.addData({ readFromCache: true }, "none_resource"); - diagnosticNode.recordEndpointResolution(this.defaultEndpoint); - return this.defaultEndpoint; - } - if (this.readableLocations.length === 0 || this.writeableLocations.length === 0) { - const resourceResponse = await withMetadataDiagnostics(async (metadataNode) => { - return this.readDatabaseAccount(metadataNode, { - urlConnection: this.defaultEndpoint, - }); - }, diagnosticNode, exports.MetadataLookUpType.DatabaseAccountLookUp); - this.writeableLocations = resourceResponse.resource.writableLocations; - this.readableLocations = resourceResponse.resource.readableLocations; - } - const locations = isReadRequest(operationType) - ? 
this.readableLocations - : this.writeableLocations; - let location; - // If we have preferred locations, try each one in order and use the first available one - if (this.preferredLocations && - this.preferredLocations.length > 0 && - startServiceEndpointIndex < this.preferredLocations.length) { - for (let i = startServiceEndpointIndex; i < this.preferredLocations.length; i++) { - const preferredLocation = this.preferredLocations[i]; - location = locations.find((loc) => loc.unavailable !== true && - normalizeEndpoint(loc.name) === normalizeEndpoint(preferredLocation)); - if (location) { - break; - } - } - } - // If no preferred locations or one did not match, just grab the first one that is available - if (!location) { - const startIndexValid = startServiceEndpointIndex >= 0 && startServiceEndpointIndex < locations.length; - const locationsToSearch = startIndexValid - ? locations.slice(startServiceEndpointIndex) - : locations; - location = locationsToSearch.find((loc) => { - return loc.unavailable !== true; - }); +const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log, +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); } - location = location ? location : { name: "", databaseAccountEndpoint: this.defaultEndpoint }; - diagnosticNode.recordEndpointResolution(location.databaseAccountEndpoint); - return location.databaseAccountEndpoint; - } - /** - * Refreshes the endpoint list by clearning stale unavailability and then - * retrieving the writable and readable locations from the geo-replicated database account - * and then updating the locations cache. 
- * We skip the refreshing if enableEndpointDiscovery is set to False - */ - async refreshEndpointList(diagnosticNode) { - if (!this.isRefreshing && this.enableEndpointDiscovery) { - this.isRefreshing = true; - const databaseAccount = await this.getDatabaseAccountFromAnyEndpoint(diagnosticNode); - if (databaseAccount) { - this.refreshStaleUnavailableLocations(); - this.refreshEndpoints(databaseAccount); - } - this.isRefreshing = false; + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); } } - refreshEndpoints(databaseAccount) { - for (const location of databaseAccount.writableLocations) { - const existingLocation = this.writeableLocations.find((loc) => loc.name === location.name); - if (!existingLocation) { - this.writeableLocations.push(location); - } - } - for (const location of databaseAccount.readableLocations) { - const existingLocation = this.readableLocations.find((loc) => loc.name === location.name); - if (!existingLocation) { - this.readableLocations.push(location); - } - } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); } - refreshStaleUnavailableLocations() { - const now = Date.now(); - this.updateLocation(now, this.unavailableReadableLocations, this.readableLocations); - this.unavailableReadableLocations = this.cleanUnavailableLocationList(now, this.unavailableReadableLocations); - this.updateLocation(now, this.unavailableWriteableLocations, this.writeableLocations); - this.unavailableWriteableLocations = this.cleanUnavailableLocationList(now, this.unavailableWriteableLocations); +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; } - /** - * update the locationUnavailability to undefined if the location is available again - * @param now - current time - * @param unavailableLocations - list of unavailable locations - * @param allLocations - list of all locations - */ - updateLocation(now, unavailableLocations, allLocations) { - for (const location of unavailableLocations) { - const unavaialableLocation = allLocations.find((loc) => loc.name === location.name); - if (unavaialableLocation && - now - unavaialableLocation.lastUnavailabilityTimestampInMs > - Constants$1.LocationUnavailableExpirationTimeInMs) { - unavaialableLocation.unavailable = false; - } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; } } - cleanUnavailableLocationList(now, unavailableLocations) { - return unavailableLocations.filter((loc) => { - if (loc && - now - loc.lastUnavailabilityTimestampInMs >= Constants$1.LocationUnavailableExpirationTimeInMs) { - return false; - } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { return true; - }); - } - /** - * Gets the database account first by using the default endpoint, and if that doesn't returns - * use the endpoints for the preferred locations in the order they are specified to get - * the database account. 
- */ - async getDatabaseAccountFromAnyEndpoint(diagnosticNode) { - try { - const options = { urlConnection: this.defaultEndpoint }; - const { resource: databaseAccount } = await this.readDatabaseAccount(diagnosticNode, options); - return databaseAccount; - // If for any reason(non - globaldb related), we are not able to get the database - // account from the above call to readDatabaseAccount, - // we would try to get this information from any of the preferred locations that the user - // might have specified (by creating a locational endpoint) - // and keeping eating the exception until we get the database account and return None at the end, - // if we are not able to get that info from any endpoints } - catch (err) { - // TODO: Tracing + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend, + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; } - if (this.preferredLocations) { - for (const location of this.preferredLocations) { - try { - const locationalEndpoint = GlobalEndpointManager.getLocationalEndpoint(this.defaultEndpoint, location); - const options = { urlConnection: locationalEndpoint }; - const { resource: databaseAccount } = await this.readDatabaseAccount(diagnosticNode, options); - if (databaseAccount) { - return databaseAccount; - } - } - catch (err) { - // TODO: Tracing - } - } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; } + newDebugger.log(...args); } - /** - * Gets the locational endpoint using the location name passed to it using the default endpoint. - * - * @param defaultEndpoint - The default endpoint to use for the endpoint. - * @param locationName - The location name for the azure region like "East US". 
- */ - static getLocationalEndpoint(defaultEndpoint, locationName) { - // For defaultEndpoint like 'https://contoso.documents.azure.com:443/' parse it to generate URL format - // This defaultEndpoint should be global endpoint(and cannot be a locational endpoint) - // and we agreed to document that - const endpointUrl = new URL(defaultEndpoint); - // hostname attribute in endpointUrl will return 'contoso.documents.azure.com' - if (endpointUrl.hostname) { - const hostnameParts = endpointUrl.hostname.toString().toLowerCase().split("."); - if (hostnameParts) { - // globalDatabaseAccountName will return 'contoso' - const globalDatabaseAccountName = hostnameParts[0]; - // Prepare the locationalDatabaseAccountName as contoso-EastUS for location_name 'East US' - const locationalDatabaseAccountName = globalDatabaseAccountName + "-" + locationName.replace(" ", ""); - // Replace 'contoso' with 'contoso-EastUS' and - // return locationalEndpoint as https://contoso-EastUS.documents.azure.com:443/ - const locationalEndpoint = defaultEndpoint - .toLowerCase() - .replace(globalDatabaseAccountName, locationalDatabaseAccountName); - return locationalEndpoint; - } - } - return null; + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; } + return false; } -function normalizeEndpoint(endpoint) { - return endpoint.split(" ").join("").toLowerCase(); +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; } +var debug = debugObj; // Copyright (c) Microsoft Corporation. +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; /** - * Provides a client-side logical representation of the Azure Cosmos DB database account. - * This client is used to configure and execute requests in the Azure Cosmos DB database service. - * @example Instantiate a client and create a new database - * ```typescript - * const client = new CosmosClient({endpoint: "", auth: {masterKey: ""}}); - * await client.databases.create({id: ""}); - * ``` - * @example Instantiate a client with custom Connection Policy - * ```typescript - * const connectionPolicy = new ConnectionPolicy(); - * connectionPolicy.RequestTimeout = 10000; - * const client = new CosmosClient({ - * endpoint: "", - * auth: {masterKey: ""}, - * connectionPolicy - * }); - * ``` + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. 
*/ -class CosmosClient { - constructor(optionsOrConnectionString) { - var _a, _b; - if (typeof optionsOrConnectionString === "string") { - optionsOrConnectionString = parseConnectionString(optionsOrConnectionString); - } - const endpoint = checkURL(optionsOrConnectionString.endpoint); - if (!endpoint) { - throw new Error("Invalid endpoint specified"); - } - const clientConfig = this.initializeClientConfigDiagnostic(optionsOrConnectionString); - optionsOrConnectionString.connectionPolicy = Object.assign({}, defaultConnectionPolicy, optionsOrConnectionString.connectionPolicy); - optionsOrConnectionString.defaultHeaders = optionsOrConnectionString.defaultHeaders || {}; - optionsOrConnectionString.defaultHeaders[Constants$1.HttpHeaders.CacheControl] = "no-cache"; - optionsOrConnectionString.defaultHeaders[Constants$1.HttpHeaders.Version] = - Constants$1.CurrentVersion; - if (optionsOrConnectionString.consistencyLevel !== undefined) { - optionsOrConnectionString.defaultHeaders[Constants$1.HttpHeaders.ConsistencyLevel] = - optionsOrConnectionString.consistencyLevel; - } - optionsOrConnectionString.defaultHeaders[Constants$1.HttpHeaders.UserAgent] = getUserAgent(optionsOrConnectionString.userAgentSuffix); - const globalEndpointManager = new GlobalEndpointManager(optionsOrConnectionString, async (diagnosticNode, opts) => this.getDatabaseAccountInternal(diagnosticNode, opts)); - this.clientContext = new ClientContext(optionsOrConnectionString, globalEndpointManager, clientConfig, determineDiagnosticLevel(optionsOrConnectionString.diagnosticLevel, getDiagnosticLevelFromEnvironment())); - if (((_a = optionsOrConnectionString.connectionPolicy) === null || _a === void 0 ? void 0 : _a.enableEndpointDiscovery) && - ((_b = optionsOrConnectionString.connectionPolicy) === null || _b === void 0 ? void 0 : _b.enableBackgroundEndpointRefreshing)) { - this.backgroundRefreshEndpointList(globalEndpointManager, optionsOrConnectionString.connectionPolicy.endpointRefreshRateInMs || - defaultConnectionPolicy.endpointRefreshRateInMs); - } - this.databases = new Databases(this, this.clientContext); - this.offers = new Offers(this, this.clientContext); - } - initializeClientConfigDiagnostic(optionsOrConnectionString) { - return { - endpoint: optionsOrConnectionString.endpoint, - resourceTokensConfigured: optionsOrConnectionString.resourceTokens !== undefined, - tokenProviderConfigured: optionsOrConnectionString.tokenProvider !== undefined, - aadCredentialsConfigured: optionsOrConnectionString.aadCredentials !== undefined, - connectionPolicyConfigured: optionsOrConnectionString.connectionPolicy !== undefined, - consistencyLevel: optionsOrConnectionString.consistencyLevel, - defaultHeaders: optionsOrConnectionString.defaultHeaders, - agentConfigured: optionsOrConnectionString.agent !== undefined, - userAgentSuffix: optionsOrConnectionString.userAgentSuffix, - diagnosticLevel: optionsOrConnectionString.diagnosticLevel, - pluginsConfigured: optionsOrConnectionString.plugins !== undefined, - sDKVersion: Constants$1.SDKVersion, - }; - } - /** - * Get information about the current {@link DatabaseAccount} (including which regions are supported, etc.) 
- */ - async getDatabaseAccount(options) { - return withDiagnostics(async (diagnosticNode) => { - return this.getDatabaseAccountInternal(diagnosticNode, options); - }, this.clientContext); - } - /** - * @hidden - */ - async getDatabaseAccountInternal(diagnosticNode, options) { - const response = await this.clientContext.getDatabaseAccount(diagnosticNode, options); - return new ResourceResponse(response.result, response.headers, response.code, getEmptyCosmosDiagnostics(), response.substatus); - } - /** - * Gets the currently used write endpoint url. Useful for troubleshooting purposes. - * - * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints. - */ - async getWriteEndpoint() { - return withDiagnostics(async (diagnosticNode) => { - return this.clientContext.getWriteEndpoint(diagnosticNode); - }, this.clientContext); - } - /** - * Gets the currently used read endpoint. Useful for troubleshooting purposes. - * - * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints. - */ - async getReadEndpoint() { - return withDiagnostics(async (diagnosticNode) => { - return this.clientContext.getReadEndpoint(diagnosticNode); - }, this.clientContext); - } - /** - * Gets the known write endpoints. Useful for troubleshooting purposes. - * - * The urls may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints. - */ - getWriteEndpoints() { - return this.clientContext.getWriteEndpoints(); - } - /** - * Gets the currently used read endpoint. Useful for troubleshooting purposes. - * - * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints. - */ - getReadEndpoints() { - return this.clientContext.getReadEndpoints(); - } - /** - * Used for reading, updating, or deleting a existing database by id or accessing containers belonging to that database. - * - * This does not make a network call. Use `.read` to get info about the database after getting the {@link Database} object. - * - * @param id - The id of the database. - * @example Create a new container off of an existing database - * ```typescript - * const container = client.database("").containers.create(""); - * ``` - * - * @example Delete an existing database - * ```typescript - * await client.database("").delete(); - * ``` - */ - database(id) { - return new Database(this, id, this.clientContext); +const AzureLogger = debug("azure"); +AzureLogger.log = (...args) => { + debug.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); } - /** - * Used for reading, or updating a existing offer by id. - * @param id - The id of the offer. - */ - offer(id) { - return new Offer(this, id, this.clientContext); + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); } - /** - * Clears background endpoint refresher. Use client.dispose() when destroying the CosmosClient within another process. - */ - dispose() { - clearTimeout(this.endpointRefresher); +} +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. 
+ * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); } - async backgroundRefreshEndpointList(globalEndpointManager, refreshRate) { - this.endpointRefresher = setInterval(() => { - try { - return withDiagnostics(async (diagnosticNode) => { - return globalEndpointManager.refreshEndpointList(diagnosticNode); - }, this.clientContext, exports.DiagnosticNodeType.BACKGROUND_REFRESH_THREAD); - } - catch (e) { - console.warn("Failed to refresh endpoints", e); - } - }, refreshRate); - if (this.endpointRefresher.unref && typeof this.endpointRefresher.unref === "function") { - this.endpointRefresher.unref(); + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); } } + debug.enable(enabledNamespaces.join(",")); } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -class SasTokenProperties { -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/// -function encodeUTF8(str) { - const bytes = new Uint8Array(str.length); - for (let i = 0; i < str.length; i++) { - bytes[i] = str.charCodeAt(i); - } - return bytes; -} - -// Copyright (c) Microsoft Corporation. /** - * Experimental internal only - * Generates the payload representing the permission configuration for the sas token. + * Retrieves the currently specified log level. */ -async function createAuthorizationSasToken(masterKey, sasTokenProperties) { - let resourcePrefixPath = ""; - if (typeof sasTokenProperties.databaseName === "string" && - sasTokenProperties.databaseName !== "") { - resourcePrefixPath += `/${Constants$1.Path.DatabasesPathSegment}/${sasTokenProperties.databaseName}`; - } - if (typeof sasTokenProperties.containerName === "string" && - sasTokenProperties.containerName !== "") { - if (sasTokenProperties.databaseName === "") { - throw new Error(`illegalArgumentException : ${sasTokenProperties.databaseName} \ - is an invalid database name`); - } - resourcePrefixPath += `/${Constants$1.Path.CollectionsPathSegment}/${sasTokenProperties.containerName}`; - } - if (typeof sasTokenProperties.resourceName === "string" && - sasTokenProperties.resourceName !== "") { - if (sasTokenProperties.containerName === "") { - throw new Error(`illegalArgumentException : ${sasTokenProperties.containerName} \ - is an invalid container name`); - } - switch (sasTokenProperties.resourceKind) { - case "ITEM": - resourcePrefixPath += `${Constants$1.Path.Root}${Constants$1.Path.DocumentsPathSegment}`; - break; - case "STORED_PROCEDURE": - resourcePrefixPath += `${Constants$1.Path.Root}${Constants$1.Path.StoredProceduresPathSegment}`; - break; - case "USER_DEFINED_FUNCTION": - resourcePrefixPath += `${Constants$1.Path.Root}${Constants$1.Path.UserDefinedFunctionsPathSegment}`; - break; - case "TRIGGER": - resourcePrefixPath += `${Constants$1.Path.Root}${Constants$1.Path.TriggersPathSegment}`; - break; - default: - throw new Error(`illegalArgumentException : ${sasTokenProperties.resourceKind} \ - is an invalid resource kind`); - } - resourcePrefixPath += `${Constants$1.Path.Root}${sasTokenProperties.resourceName}${Constants$1.Path.Root}`; - } - sasTokenProperties.resourcePath = resourcePrefixPath.toString(); - let partitionRanges = ""; - if 
(sasTokenProperties.partitionKeyValueRanges !== undefined && - sasTokenProperties.partitionKeyValueRanges.length > 0) { - if (typeof sasTokenProperties.resourceKind !== "string" && - sasTokenProperties.resourceKind !== "ITEM") { - throw new Error(`illegalArgumentException : ${sasTokenProperties.resourceKind} \ - is an invalid partition key value range`); - } - sasTokenProperties.partitionKeyValueRanges.forEach((range) => { - partitionRanges += `${encodeUTF8(range)},`; - }); - } - if (sasTokenProperties.controlPlaneReaderScope === 0) { - sasTokenProperties.controlPlaneReaderScope += exports.SasTokenPermissionKind.ContainerReadAny; - sasTokenProperties.controlPlaneWriterScope += exports.SasTokenPermissionKind.ContainerReadAny; - } - if (sasTokenProperties.dataPlaneReaderScope === 0 && - sasTokenProperties.dataPlaneWriterScope === 0) { - sasTokenProperties.dataPlaneReaderScope = exports.SasTokenPermissionKind.ContainerFullAccess; - sasTokenProperties.dataPlaneWriterScope = exports.SasTokenPermissionKind.ContainerFullAccess; - } - if (typeof sasTokenProperties.keyType !== "number" || - typeof sasTokenProperties.keyType === undefined) { - switch (sasTokenProperties.keyType) { - case CosmosKeyType.PrimaryMaster: - sasTokenProperties.keyType = 1; - break; - case CosmosKeyType.SecondaryMaster: - sasTokenProperties.keyType = 2; - break; - case CosmosKeyType.PrimaryReadOnly: - sasTokenProperties.keyType = 3; - break; - case CosmosKeyType.SecondaryReadOnly: - sasTokenProperties.keyType = 4; - break; - default: - throw new Error(`illegalArgumentException : ${sasTokenProperties.keyType} \ - is an invalid key type`); - } - } - const payload = sasTokenProperties.user + - "\n" + - sasTokenProperties.userTag + - "\n" + - sasTokenProperties.resourcePath + - "\n" + - partitionRanges + - "\n" + - utcsecondsSinceEpoch(sasTokenProperties.startTime).toString(16) + - "\n" + - utcsecondsSinceEpoch(sasTokenProperties.expiryTime).toString(16) + - "\n" + - sasTokenProperties.keyType + - "\n" + - sasTokenProperties.controlPlaneReaderScope.toString(16) + - "\n" + - sasTokenProperties.controlPlaneWriterScope.toString(16) + - "\n" + - sasTokenProperties.dataPlaneReaderScope.toString(16) + - "\n" + - sasTokenProperties.dataPlaneWriterScope.toString(16) + - "\n"; - const signedPayload = await hmac(masterKey, Buffer.from(payload).toString("base64")); - return "type=sas&ver=1.0&sig=" + signedPayload + ";" + Buffer.from(payload).toString("base64"); +function getLogLevel() { + return azureLogLevel; } +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100, +}; /** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
* @hidden */ -// TODO: utcMilllisecondsSinceEpoch -function utcsecondsSinceEpoch(date) { - return Math.round(date.getTime() / 1000); +function createClientLogger(namespace) { + const clientRootLogger = AzureLogger.extend(namespace); + patchLogMethod(AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose"), + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level, + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug.disable(); + debug.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]); +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); } -Object.defineProperty(exports, "RestError", ({ - enumerable: true, - get: function () { return coreRestPipeline.RestError; } -})); -Object.defineProperty(exports, "AbortError", ({ - enumerable: true, - get: function () { return abortController.AbortError; } -})); -exports.BulkOperationType = BulkOperationType; -exports.ChangeFeedIterator = ChangeFeedIterator; -exports.ChangeFeedIteratorResponse = ChangeFeedIteratorResponse; -exports.ChangeFeedResponse = ChangeFeedResponse; -exports.ChangeFeedStartFrom = ChangeFeedStartFrom; -exports.ClientContext = ClientContext; -exports.ClientSideMetrics = ClientSideMetrics; -exports.Conflict = Conflict; -exports.ConflictResponse = ConflictResponse; -exports.Conflicts = Conflicts; -exports.Constants = Constants$1; -exports.Container = Container; -exports.ContainerResponse = ContainerResponse; -exports.Containers = Containers; -exports.CosmosClient = CosmosClient; -exports.CosmosDiagnostics = CosmosDiagnostics; -exports.DEFAULT_PARTITION_KEY_PATH = DEFAULT_PARTITION_KEY_PATH; -exports.Database = Database; -exports.DatabaseAccount = DatabaseAccount; -exports.DatabaseResponse = DatabaseResponse; -exports.Databases = Databases; -exports.DiagnosticNodeInternal = DiagnosticNodeInternal; -exports.ErrorResponse = ErrorResponse; -exports.FeedRange = FeedRange; -exports.FeedResponse = FeedResponse; -exports.GlobalEndpointManager = GlobalEndpointManager; -exports.Item = Item; -exports.ItemResponse = ItemResponse; -exports.Items = Items; -exports.Offer = Offer; -exports.OfferResponse = OfferResponse; -exports.Offers = Offers; -exports.PartitionKeyBuilder = PartitionKeyBuilder; -exports.PatchOperationType = PatchOperationType; -exports.Permission = Permission; -exports.PermissionResponse = PermissionResponse; -exports.Permissions = Permissions; -exports.QueryIterator = QueryIterator; -exports.QueryMetrics = QueryMetrics; -exports.QueryMetricsConstants = QueryMetricsConstants; -exports.QueryPreparationTimes = QueryPreparationTimes; -exports.ResourceResponse = ResourceResponse; -exports.RuntimeExecutionTimes = RuntimeExecutionTimes; -exports.SasTokenProperties = SasTokenProperties; -exports.Scripts = Scripts; -exports.StatusCodes = StatusCodes; -exports.StoredProcedure = StoredProcedure; -exports.StoredProcedureResponse = StoredProcedureResponse; -exports.StoredProcedures = StoredProcedures; -exports.TimeSpan = TimeSpan; 
-exports.TimeoutError = TimeoutError; -exports.Trigger = Trigger; -exports.TriggerResponse = TriggerResponse; -exports.Triggers = Triggers; -exports.User = User; -exports.UserDefinedFunction = UserDefinedFunction; -exports.UserDefinedFunctionResponse = UserDefinedFunctionResponse; -exports.UserDefinedFunctions = UserDefinedFunctions; -exports.UserResponse = UserResponse; -exports.Users = Users; -exports.createAuthorizationSasToken = createAuthorizationSasToken; -exports.setAuthorizationTokenHeaderUsingMasterKey = setAuthorizationTokenHeaderUsingMasterKey; +exports.AzureLogger = AzureLogger; +exports.createClientLogger = createClientLogger; +exports.getLogLevel = getLogLevel; +exports.setLogLevel = setLogLevel; //# sourceMappingURL=index.js.map /***/ }), -/***/ 81675: -/***/ ((module) => { - -/****************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, Symbol, Reflect, Promise, SuppressedError */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __esDecorate; -var __runInitializers; -var __propKey; -var __setFunctionName; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __classPrivateFieldIn; -var __createBinding; -var __addDisposableResource; -var __disposeResources; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { - function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } - var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; - var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; - var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); - var _, done = false; - for (var i = decorators.length - 1; i >= 0; i--) { - var context = {}; - for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context.access[p] = contextIn.access[p]; - context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); - if (kind === "accessor") { - if (result === void 0) continue; - if (result === null || typeof result !== "object") throw new TypeError("Object expected"); - if (_ = accept(result.get)) descriptor.get = _; - if (_ = accept(result.set)) descriptor.set = _; - if (_ = accept(result.init)) initializers.unshift(_); - } - else if (_ = accept(result)) { - if (kind === "field") initializers.unshift(_); - else descriptor[key] = _; - } - } - if (target) Object.defineProperty(target, contextIn.name, descriptor); - done = true; - }; - - __runInitializers = function (thisArg, initializers, value) { - var useValue = arguments.length > 2; - for (var i = 0; i < initializers.length; i++) { - value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); - } - return useValue ? value : void 0; - }; - - __propKey = function (x) { - return typeof x === "symbol" ? x : "".concat(x); - }; - - __setFunctionName = function (f, name, prefix) { - if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; - return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? 
(this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - __classPrivateFieldIn = function (state, receiver) { - if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); - return typeof state === "function" ? receiver === state : state.has(receiver); - }; - - __addDisposableResource = function (env, value, async) { - if (value !== null && value !== void 0) { - if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); - var dispose; - if (async) { - if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); - dispose = value[Symbol.asyncDispose]; - } - if (dispose === void 0) { - if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); - dispose = value[Symbol.dispose]; - } - if (typeof dispose !== "function") throw new TypeError("Object not disposable."); - env.stack.push({ value: value, dispose: dispose, async: async }); - } - else if (async) { - env.stack.push({ async: true }); - } - return value; - }; - - var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { - var e = new Error(message); - return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; - }; - - __disposeResources = function (env) { - function fail(e) { - env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; - env.hasError = true; - } - function next() { - while (env.stack.length) { - var rec = env.stack.pop(); - try { - var result = rec.dispose && rec.dispose.call(rec.value); - if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); - } - catch (e) { - fail(e); - } - } - if (env.hasError) throw env.error; - } - return next(); - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__esDecorate", __esDecorate); - exporter("__runInitializers", __runInitializers); - exporter("__propKey", __propKey); - exporter("__setFunctionName", __setFunctionName); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); - exporter("__classPrivateFieldIn", __classPrivateFieldIn); - exporter("__addDisposableResource", __addDisposableResource); - exporter("__disposeResources", __disposeResources); -}); - - -/***/ }), - -/***/ 50378: +/***/ 8786: /***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -70771,29 +67845,29 @@ Object.defineProperty(exports, "parse", ({ } })); -var _v = _interopRequireDefault(__nccwpck_require__(59054)); +var _v = _interopRequireDefault(__nccwpck_require__(12040)); -var _v2 = _interopRequireDefault(__nccwpck_require__(67261)); +var _v2 = _interopRequireDefault(__nccwpck_require__(86856)); -var _v3 = _interopRequireDefault(__nccwpck_require__(59921)); +var _v3 = _interopRequireDefault(__nccwpck_require__(3661)); -var _v4 = _interopRequireDefault(__nccwpck_require__(40447)); +var _v4 = _interopRequireDefault(__nccwpck_require__(45233)); -var _nil = _interopRequireDefault(__nccwpck_require__(18018)); +var _nil = _interopRequireDefault(__nccwpck_require__(56619)); -var _version = _interopRequireDefault(__nccwpck_require__(54389)); +var _version = _interopRequireDefault(__nccwpck_require__(84721)); -var _validate = _interopRequireDefault(__nccwpck_require__(73054)); +var _validate = _interopRequireDefault(__nccwpck_require__(8392)); -var _stringify = _interopRequireDefault(__nccwpck_require__(59704)); +var _stringify = _interopRequireDefault(__nccwpck_require__(82127)); -var _parse = _interopRequireDefault(__nccwpck_require__(72350)); +var _parse = _interopRequireDefault(__nccwpck_require__(20115)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /***/ }), -/***/ 70411: +/***/ 99057: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -70823,7 +67897,7 @@ exports["default"] = _default; /***/ }), -/***/ 18018: +/***/ 56619: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -70838,7 +67912,7 @@ exports["default"] = _default; /***/ }), -/***/ 72350: +/***/ 20115: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -70849,7 +67923,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _validate = _interopRequireDefault(__nccwpck_require__(73054)); +var _validate = _interopRequireDefault(__nccwpck_require__(8392)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -70890,7 +67964,7 @@ exports["default"] = _default; /***/ }), -/***/ 44976: +/***/ 61134: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -70905,7 +67979,7 @@ exports["default"] = _default; /***/ }), -/***/ 97165: +/***/ 58634: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -70936,7 +68010,7 @@ function rng() { /***/ }), -/***/ 19850: +/***/ 4764: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -70966,7 +68040,7 @@ exports["default"] = _default; /***/ }), -/***/ 59704: +/***/ 82127: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -70977,7 +68051,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _validate = _interopRequireDefault(__nccwpck_require__(73054)); +var _validate = _interopRequireDefault(__nccwpck_require__(8392)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } @@ -71012,7 +68086,7 @@ exports["default"] = _default; /***/ }), -/***/ 59054: +/***/ 12040: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -71023,9 +68097,9 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _rng = _interopRequireDefault(__nccwpck_require__(97165)); +var _rng = _interopRequireDefault(__nccwpck_require__(58634)); -var _stringify = _interopRequireDefault(__nccwpck_require__(59704)); +var _stringify = _interopRequireDefault(__nccwpck_require__(82127)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -71126,7 +68200,7 @@ exports["default"] = _default; /***/ }), -/***/ 67261: +/***/ 86856: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -71137,9 +68211,9 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _v = _interopRequireDefault(__nccwpck_require__(99740)); +var _v = _interopRequireDefault(__nccwpck_require__(40432)); -var _md = _interopRequireDefault(__nccwpck_require__(70411)); +var _md = _interopRequireDefault(__nccwpck_require__(99057)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -71149,7 +68223,7 @@ exports["default"] = _default; /***/ }), -/***/ 99740: +/***/ 40432: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -71161,9 +68235,9 @@ Object.defineProperty(exports, "__esModule", ({ exports["default"] = _default; exports.URL = exports.DNS = void 0; -var _stringify = _interopRequireDefault(__nccwpck_require__(59704)); +var _stringify = _interopRequireDefault(__nccwpck_require__(82127)); -var _parse = _interopRequireDefault(__nccwpck_require__(72350)); +var _parse = _interopRequireDefault(__nccwpck_require__(20115)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -71234,7 +68308,7 @@ function _default(name, version, hashfunc) { /***/ }), -/***/ 59921: +/***/ 3661: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -71245,9 +68319,9 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _rng = _interopRequireDefault(__nccwpck_require__(97165)); +var _rng = _interopRequireDefault(__nccwpck_require__(58634)); -var _stringify = _interopRequireDefault(__nccwpck_require__(59704)); +var _stringify = _interopRequireDefault(__nccwpck_require__(82127)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -71278,7 +68352,7 @@ exports["default"] = _default; /***/ }), -/***/ 40447: +/***/ 45233: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -71289,9 +68363,9 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _v = _interopRequireDefault(__nccwpck_require__(99740)); +var _v = _interopRequireDefault(__nccwpck_require__(40432)); -var _sha = _interopRequireDefault(__nccwpck_require__(19850)); +var _sha = _interopRequireDefault(__nccwpck_require__(4764)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } @@ -71301,7 +68375,7 @@ exports["default"] = _default; /***/ }), -/***/ 73054: +/***/ 8392: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -71312,7 +68386,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _regex = _interopRequireDefault(__nccwpck_require__(44976)); +var _regex = _interopRequireDefault(__nccwpck_require__(61134)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -71325,7 +68399,7 @@ exports["default"] = _default; /***/ }), -/***/ 54389: +/***/ 84721: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -71336,7 +68410,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _validate = _interopRequireDefault(__nccwpck_require__(73054)); +var _validate = _interopRequireDefault(__nccwpck_require__(8392)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -71353,7 +68427,7 @@ exports["default"] = _default; /***/ }), -/***/ 3084: +/***/ 84100: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -71361,22 +68435,23 @@ exports["default"] = _default; Object.defineProperty(exports, "__esModule", ({ value: true })); -var msalCommon = __nccwpck_require__(1985); -var logger$o = __nccwpck_require__(3233); -var abortController = __nccwpck_require__(52557); +var coreRestPipeline = __nccwpck_require__(88121); +var tslib = __nccwpck_require__(4351); +var coreAuth = __nccwpck_require__(39645); var coreUtil = __nccwpck_require__(51333); +var coreHttpCompat = __nccwpck_require__(25083); var coreClient = __nccwpck_require__(29729); -var coreRestPipeline = __nccwpck_require__(88121); -var coreTracing = __nccwpck_require__(94175); -var fs = __nccwpck_require__(57147); -var os = __nccwpck_require__(22037); -var path = __nccwpck_require__(71017); -var promises = __nccwpck_require__(73292); -var https = __nccwpck_require__(95687); -var child_process = __nccwpck_require__(32081); +var coreXml = __nccwpck_require__(17309); +var logger$1 = __nccwpck_require__(3233); +var abortController = __nccwpck_require__(1753); var crypto = __nccwpck_require__(6113); +var coreTracing = __nccwpck_require__(19363); +var stream = __nccwpck_require__(12781); +var coreLro = __nccwpck_require__(90334); +var events = __nccwpck_require__(82361); +var fs = __nccwpck_require__(57147); var util = __nccwpck_require__(73837); -var open = __nccwpck_require__(85768); +var buffer = __nccwpck_require__(14300); function _interopNamespaceDefault(e) { var n = Object.create(null); @@ -71395,5412 +68470,2477 @@ function _interopNamespaceDefault(e) { return Object.freeze(n); } -var msalCommon__namespace = /*#__PURE__*/_interopNamespaceDefault(msalCommon); -var child_process__namespace = /*#__PURE__*/_interopNamespaceDefault(child_process); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function isErrorResponse(errorResponse) { - return (errorResponse && - typeof errorResponse.error === "string" && - typeof errorResponse.error_description === "string"); -} -/** - * The Error.name value of an CredentialUnavailable - */ -const CredentialUnavailableErrorName = "CredentialUnavailableError"; -/** - * This signifies that the credential that was tried in a chained credential - * was not available to be used as the credential. 
Rather than treating this as - * an error that should halt the chain, it's caught and the chain continues - */ -class CredentialUnavailableError extends Error { - constructor(message) { - super(message); - this.name = CredentialUnavailableErrorName; - } -} -/** - * The Error.name value of an AuthenticationError - */ -const AuthenticationErrorName = "AuthenticationError"; -/** - * Provides details about a failure to authenticate with Azure Active - * Directory. The `errorResponse` field contains more details about - * the specific failure. - */ -class AuthenticationError extends Error { - // eslint-disable-next-line @typescript-eslint/ban-types - constructor(statusCode, errorBody) { - let errorResponse = { - error: "unknown", - errorDescription: "An unknown error occurred and no additional details are available.", - }; - if (isErrorResponse(errorBody)) { - errorResponse = convertOAuthErrorResponseToErrorResponse(errorBody); - } - else if (typeof errorBody === "string") { - try { - // Most error responses will contain JSON-formatted error details - // in the response body - const oauthErrorResponse = JSON.parse(errorBody); - errorResponse = convertOAuthErrorResponseToErrorResponse(oauthErrorResponse); - } - catch (e) { - if (statusCode === 400) { - errorResponse = { - error: "authority_not_found", - errorDescription: "The specified authority URL was not found.", - }; - } - else { - errorResponse = { - error: "unknown_error", - errorDescription: `An unknown error has occurred. Response body:\n\n${errorBody}`, - }; - } - } - } - else { - errorResponse = { - error: "unknown_error", - errorDescription: "An unknown error occurred and no additional details are available.", - }; - } - super(`${errorResponse.error} Status code: ${statusCode}\nMore details:\n${errorResponse.errorDescription}`); - this.statusCode = statusCode; - this.errorResponse = errorResponse; - // Ensure that this type reports the correct name - this.name = AuthenticationErrorName; - } -} -/** - * The Error.name value of an AggregateAuthenticationError - */ -const AggregateAuthenticationErrorName = "AggregateAuthenticationError"; -/** - * Provides an `errors` array containing {@link AuthenticationError} instance - * for authentication failures from credentials in a {@link ChainedTokenCredential}. - */ -class AggregateAuthenticationError extends Error { - constructor(errors, errorMessage) { - const errorDetail = errors.join("\n"); - super(`${errorMessage}\n${errorDetail}`); - this.errors = errors; - // Ensure that this type reports the correct name - this.name = AggregateAuthenticationErrorName; - } -} -function convertOAuthErrorResponseToErrorResponse(errorBody) { - return { - error: errorBody.error, - errorDescription: errorBody.error_description, - correlationId: errorBody.correlation_id, - errorCodes: errorBody.error_codes, - timestamp: errorBody.timestamp, - traceId: errorBody.trace_id, - }; -} -/** - * Error used to enforce authentication after trying to retrieve a token silently. - */ -class AuthenticationRequiredError extends Error { - constructor( - /** - * Optional parameters. A message can be specified. The {@link GetTokenOptions} of the request can also be specified to more easily associate the error with the received parameters. 
- */ - options) { - super(options.message); - this.scopes = options.scopes; - this.getTokenOptions = options.getTokenOptions; - this.name = "AuthenticationRequiredError"; - } -} +var coreHttpCompat__namespace = /*#__PURE__*/_interopNamespaceDefault(coreHttpCompat); +var coreClient__namespace = /*#__PURE__*/_interopNamespaceDefault(coreClient); +var fs__namespace = /*#__PURE__*/_interopNamespaceDefault(fs); +var util__namespace = /*#__PURE__*/_interopNamespaceDefault(util); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The AzureLogger used for all clients within the identity package - */ -const logger$n = logger$o.createClientLogger("identity"); -/** - * Separates a list of environment variable names into a plain object with two arrays: an array of missing environment variables and another array with assigned environment variables. - * @param supportedEnvVars - List of environment variable names - */ -function processEnvVars(supportedEnvVars) { - return supportedEnvVars.reduce((acc, envVariable) => { - if (process.env[envVariable]) { - acc.assigned.push(envVariable); - } - else { - acc.missing.push(envVariable); - } - return acc; - }, { missing: [], assigned: [] }); -} -/** - * Formatting the success event on the credentials - */ -function formatSuccess(scope) { - return `SUCCESS. Scopes: ${Array.isArray(scope) ? scope.join(", ") : scope}.`; -} -/** - * Formatting the success event on the credentials - */ -function formatError(scope, error) { - let message = "ERROR."; - if (scope === null || scope === void 0 ? void 0 : scope.length) { - message += ` Scopes: ${Array.isArray(scope) ? scope.join(", ") : scope}.`; - } - return `${message} Error message: ${typeof error === "string" ? error : error.message}.`; -} -/** - * Generates a CredentialLoggerInstance. - * - * It logs with the format: - * - * `[title] => [message]` - * - */ -function credentialLoggerInstance(title, parent, log = logger$n) { - const fullTitle = parent ? `${parent.fullTitle} ${title}` : title; - function info(message) { - log.info(`${fullTitle} =>`, message); - } - function warning(message) { - log.warning(`${fullTitle} =>`, message); - } - function verbose(message) { - log.verbose(`${fullTitle} =>`, message); - } - return { - title, - fullTitle, - info, - warning, - verbose, - }; -} +// Licensed under the MIT License. /** - * Generates a CredentialLogger, which is a logger declared at the credential's constructor, and used at any point in the credential. - * It has all the properties of a CredentialLoggerInstance, plus other logger instances, one per method. - * - * It logs with the format: - * - * `[title] => [message]` - * `[title] => getToken() => [message]` - * + * The `@azure/logger` configuration for this package. */ -function credentialLogger(title, log = logger$n) { - const credLogger = credentialLoggerInstance(title, undefined, log); - return Object.assign(Object.assign({}, credLogger), { parent: log, getToken: credentialLoggerInstance("=> getToken()", credLogger, log) }); -} +const logger = logger$1.createClientLogger("storage-blob"); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Current version of the `@azure/identity` package. 
- */ -const SDK_VERSION = `4.0.0`; -/** - * The default client ID for authentication - * @internal - */ -// TODO: temporary - this is the Azure CLI clientID - we'll replace it when -// Developer Sign On application is available -// https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/identity/Azure.Identity/src/Constants.cs#L9 -const DeveloperSignOnClientId = "04b07795-8ddb-461a-bbee-02f9e1bf7b46"; -/** - * The default tenant for authentication - * @internal - */ -const DefaultTenantId = "common"; +// Licensed under the MIT License. /** - * A list of known Azure authority hosts + * The base class from which all request policies derive. */ -exports.AzureAuthorityHosts = void 0; -(function (AzureAuthorityHosts) { +class BaseRequestPolicy { /** - * China-based Azure Authority Host + * The main method to implement that manipulates a request/response. */ - AzureAuthorityHosts["AzureChina"] = "https://login.chinacloudapi.cn"; + constructor( /** - * Germany-based Azure Authority Host + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. */ - AzureAuthorityHosts["AzureGermany"] = "https://login.microsoftonline.de"; + _nextPolicy, /** - * US Government Azure Authority Host + * The options that can be passed to a given request policy. */ - AzureAuthorityHosts["AzureGovernment"] = "https://login.microsoftonline.us"; + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } /** - * Public Cloud Azure Authority Host + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. */ - AzureAuthorityHosts["AzurePublicCloud"] = "https://login.microsoftonline.com"; -})(exports.AzureAuthorityHosts || (exports.AzureAuthorityHosts = {})); -/** - * The default authority host. - */ -const DefaultAuthorityHost = exports.AzureAuthorityHosts.AzurePublicCloud; -/** - * Allow acquiring tokens for any tenant for multi-tentant auth. - */ -const ALL_TENANTS = ["*"]; -const CACHE_CAE_SUFFIX = ".cae"; -const CACHE_NON_CAE_SUFFIX = ".nocae"; + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } +} // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Latest AuthenticationRecord version - * @internal - */ -const LatestAuthenticationRecordVersion = "1.0"; +// Licensed under the MIT License. +const SDK_VERSION = "12.25.0"; +const SERVICE_VERSION = "2024-11-04"; +const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +const BLOCK_BLOB_MAX_BLOCKS = 50000; +const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +const REQUEST_TIMEOUT = 100 * 1000; // In ms /** - * Ensures the validity of the MSAL token - * @internal + * The OAuth scope to use with Azure Storage. 
*/ -function ensureValidMsalToken(scopes, logger, msalToken, getTokenOptions) { - const error = (message) => { - logger.getToken.info(message); - return new AuthenticationRequiredError({ - scopes: Array.isArray(scopes) ? scopes : [scopes], - getTokenOptions, - message, - }); - }; - if (!msalToken) { - throw error("No response"); - } - if (!msalToken.expiresOn) { - throw error(`Response had no "expiresOn" property.`); +const StorageOAuthScopes = "https://storage.azure.com/.default"; +const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", + X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code", +}; +const ETagNone = ""; +const ETagAny = "*"; +const SIZE_1_MB = 1 * 1024 * 1024; +const BATCH_MAX_REQUEST = 256; +const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +const HTTP_LINE_ENDING = "\r\n"; +const HTTP_VERSION_1_1 = "HTTP/1.1"; +const EncryptionAlgorithmAES25 = "AES256"; +const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + 
"x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-copy-source-error-code", + "x-ms-copy-source-status-code", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; +/// List of ports used for path style addressing. +/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. +const PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104", +]; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. 
Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +function escapeURLPath(url) { + const urlParsed = new URL(url); + let path = urlParsed.pathname; + path = path || "/"; + path = escape(path); + urlParsed.pathname = path; + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } } - if (!msalToken.accessToken) { - throw error(`Response had no "accessToken" property.`); + return proxyUri; +} +function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } } + return ""; } /** - * Generates a valid authority by combining a host with a tenantId. - * @internal + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. */ -function getAuthority(tenantId, host) { - if (!host) { - host = DefaultAuthorityHost; - } - if (new RegExp(`${tenantId}/?$`).test(host)) { - return host; +function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; } - if (host.endsWith("/")) { - return host + tenantId; + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? 
blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; } else { - return `${host}/${tenantId}`; + // SAS connection string + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + // if accountName is empty, try to read it from BlobEndpoint + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + // client constructors assume accountSas does *not* start with ? + if (accountSas.startsWith("?")) { + accountSas = accountSas.substring(1); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } } /** - * Generates the known authorities. - * If the Tenant Id is `adfs`, the authority can't be validated since the format won't match the expected one. - * For that reason, we have to force MSAL to disable validating the authority - * by sending it within the known authorities in the MSAL configuration. - * @internal + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - */ -function getKnownAuthorities(tenantId, authorityHost, disableInstanceDiscovery) { - if ((tenantId === "adfs" && authorityHost) || disableInstanceDiscovery) { - return [authorityHost]; - } - return []; +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" } /** - * Generates a logger that can be passed to the MSAL clients. - * @param logger - The logger of the credential. - * @internal + * Append a string to URL path. 
Will remove duplicated "/" in front of the string + * when URL path ends with a "/". + * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string */ -const defaultLoggerCallback = (logger, platform = coreUtil.isNode ? "Node" : "Browser") => (level, message, containsPii) => { - if (containsPii) { - return; +function appendToURLPath(url, name) { + const urlParsed = new URL(url); + let path = urlParsed.pathname; + path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + urlParsed.pathname = path; + return urlParsed.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +function setURLParameter(url, name, value) { + const urlParsed = new URL(url); + const encodedName = encodeURIComponent(name); + const encodedValue = value ? encodeURIComponent(value) : undefined; + // mutating searchParams will change the encoding, so we have to do this ourselves + const searchString = urlParsed.search === "" ? "?" : urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); + } + } } - switch (level) { - case msalCommon__namespace.LogLevel.Error: - logger.info(`MSAL ${platform} V2 error: ${message}`); - return; - case msalCommon__namespace.LogLevel.Info: - logger.info(`MSAL ${platform} V2 info message: ${message}`); - return; - case msalCommon__namespace.LogLevel.Verbose: - logger.info(`MSAL ${platform} V2 verbose message: ${message}`); - return; - case msalCommon__namespace.LogLevel.Warning: - logger.info(`MSAL ${platform} V2 warning: ${message}`); - return; + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); } -}; + urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return urlParsed.toString(); +} /** - * @internal + * Get URL parameter by name. + * + * @param url - + * @param name - */ -function getMSALLogLevel(logLevel) { - switch (logLevel) { - case "error": - return msalCommon__namespace.LogLevel.Error; - case "info": - return msalCommon__namespace.LogLevel.Info; - case "verbose": - return msalCommon__namespace.LogLevel.Verbose; - case "warning": - return msalCommon__namespace.LogLevel.Warning; - default: - // default msal logging level should be Info - return msalCommon__namespace.LogLevel.Info; - } +function getURLParameter(url, name) { + var _a; + const urlParsed = new URL(url); + return (_a = urlParsed.searchParams.get(name)) !== null && _a !== void 0 ? _a : undefined; } /** - * The common utility functions for the MSAL clients. - * Defined as a class so that the classes extending this one can have access to its methods and protected properties. + * Set URL host. * - * It keeps track of a logger and an in-memory copy of the AuthenticationRecord. + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +function setURLHost(url, host) { + const urlParsed = new URL(url); + urlParsed.hostname = host; + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. 
* - * @internal + * @param url - Source URL string */ -class MsalBaseUtilities { - constructor(options) { - this.logger = options.logger; - this.account = options.authenticationRecord; +function getURLPath(url) { + try { + const urlParsed = new URL(url); + return urlParsed.pathname; } - /** - * Generates a UUID - */ - generateUuid() { - return coreUtil.randomUUID(); + catch (e) { + return undefined; } - /** - * Handles the MSAL authentication result. - * If the result has an account, we update the local account reference. - * If the token received is invalid, an error will be thrown depending on what's missing. - */ - handleResult(scopes, clientId, result, getTokenOptions) { - if (result === null || result === void 0 ? void 0 : result.account) { - this.account = msalToPublic(clientId, result.account); - } - ensureValidMsalToken(scopes, this.logger, result, getTokenOptions); - this.logger.getToken.info(formatSuccess(scopes)); - return { - token: result.accessToken, - expiresOnTimestamp: result.expiresOn.getTime(), - }; +} +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +function getURLScheme(url) { + try { + const urlParsed = new URL(url); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; } - /** - * Handles MSAL errors. - */ - handleError(scopes, error, getTokenOptions) { - if (error.name === "AuthError" || - error.name === "ClientAuthError" || - error.name === "BrowserAuthError") { - const msalError = error; - switch (msalError.errorCode) { - case "endpoints_resolution_error": - this.logger.info(formatError(scopes, error.message)); - return new CredentialUnavailableError(error.message); - case "device_code_polling_cancelled": - return new abortController.AbortError("The authentication has been aborted by the caller."); - case "consent_required": - case "interaction_required": - case "login_required": - this.logger.info(formatError(scopes, `Authentication returned errorCode ${msalError.errorCode}`)); - break; - default: - this.logger.info(formatError(scopes, `Failed to acquire token: ${error.message}`)); - break; - } - } - if (error.name === "ClientConfigurationError" || - error.name === "BrowserConfigurationAuthError" || - error.name === "AbortError") { - return error; - } - if (error.name === "NativeAuthError") { - this.logger.info(formatError(scopes, `Error from the native broker: ${error.message} with status code: ${error.statusCode}`)); - return error; - } - return new AuthenticationRequiredError({ scopes, getTokenOptions, message: error.message }); + catch (e) { + return undefined; } } -// transformations.ts -function publicToMsal(account) { - const [environment] = account.authority.match(/([a-z]*\.[a-z]*\.[a-z]*)/) || [""]; - return Object.assign(Object.assign({}, account), { localAccountId: account.homeAccountId, environment }); -} -function msalToPublic(clientId, account) { - const record = { - authority: getAuthority(account.tenantId, account.environment), - homeAccountId: account.homeAccountId, - tenantId: account.tenantId || DefaultTenantId, - username: account.username, - clientId, - version: LatestAuthenticationRecordVersion, - }; - return record; -} /** - * Serializes an `AuthenticationRecord` into a string. - * - * The output of a serialized authentication record will contain the following properties: - * - * - "authority" - * - "homeAccountId" - * - "clientId" - * - "tenantId" - * - "username" - * - "version" + * Get URL path and query from an URL string. 
* - * To later convert this string to a serialized `AuthenticationRecord`, please use the exported function `deserializeAuthenticationRecord()`. + * @param url - Source URL string */ -function serializeAuthenticationRecord(record) { - return JSON.stringify(record); +function getURLPathAndQuery(url) { + const urlParsed = new URL(url); + const pathString = urlParsed.pathname; + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.search || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; } /** - * Deserializes a previously serialized authentication record from a string into an object. - * - * The input string must contain the following properties: - * - * - "authority" - * - "homeAccountId" - * - "clientId" - * - "tenantId" - * - "username" - * - "version" - * - * If the version we receive is unsupported, an error will be thrown. - * - * At the moment, the only available version is: "1.0", which is always set when the authentication record is serialized. + * Get URL query key value pairs from an URL string. * - * @param serializedRecord - Authentication record previously serialized into string. - * @returns AuthenticationRecord. + * @param url - */ -function deserializeAuthenticationRecord(serializedRecord) { - const parsed = JSON.parse(serializedRecord); - if (parsed.version && parsed.version !== LatestAuthenticationRecordVersion) { - throw Error("Unsupported AuthenticationRecord version"); +function getURLQueries(url) { + let queryString = new URL(url).search; + if (!queryString) { + return {}; } - return parsed; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function createConfigurationErrorMessage(tenantId) { - return `The current credential is not configured to acquire tokens for tenant ${tenantId}. To enable acquiring tokens for this tenant add it to the AdditionallyAllowedTenants on the credential options, or add "*" to AdditionallyAllowedTenants to allow acquiring tokens for any tenant.`; + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; } /** - * Of getToken contains a tenantId, this functions allows picking this tenantId as the appropriate for authentication, - * unless multitenant authentication has been disabled through the AZURE_IDENTITY_DISABLE_MULTITENANTAUTH (on Node.js), - * or unless the original tenant Id is `adfs`. - * @internal + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. 
*/ -function processMultiTenantRequest(tenantId, getTokenOptions, additionallyAllowedTenantIds = [], logger) { - var _a; - let resolvedTenantId; - if (process.env.AZURE_IDENTITY_DISABLE_MULTITENANTAUTH) { - resolvedTenantId = tenantId; - } - else if (tenantId === "adfs") { - resolvedTenantId = tenantId; +function appendToURLQuery(url, queryParts) { + const urlParsed = new URL(url); + let query = urlParsed.search; + if (query) { + query += "&" + queryParts; } else { - resolvedTenantId = (_a = getTokenOptions === null || getTokenOptions === void 0 ? void 0 : getTokenOptions.tenantId) !== null && _a !== void 0 ? _a : tenantId; - } - if (tenantId && - resolvedTenantId !== tenantId && - !additionallyAllowedTenantIds.includes("*") && - !additionallyAllowedTenantIds.some((t) => t.localeCompare(resolvedTenantId) === 0)) { - const message = createConfigurationErrorMessage(tenantId); - logger === null || logger === void 0 ? void 0 : logger.info(message); - throw new CredentialUnavailableError(message); + query = queryParts; } - return resolvedTenantId; + urlParsed.search = query; + return urlParsed.toString(); } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * @internal + * Rounds a date off to seconds. + * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component */ -function checkTenantId(logger, tenantId) { - if (!tenantId.match(/^[0-9a-zA-Z-.]+$/)) { - const error = new Error("Invalid tenant id provided. You can locate your tenant id by following the instructions listed here: https://learn.microsoft.com/partner-center/find-ids-and-domain-names."); - logger.info(formatError("", error)); - throw error; - } +function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; } /** - * @internal + * Base64 encode. + * + * @param content - */ -function resolveTenantId(logger, tenantId, clientId) { - if (tenantId) { - checkTenantId(logger, tenantId); - return tenantId; - } - if (!clientId) { - clientId = DeveloperSignOnClientId; - } - if (clientId !== DeveloperSignOnClientId) { - return "common"; - } - return "organizations"; +function base64encode(content) { + return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64"); } /** - * @internal + * Generate a 64 bytes base64 block ID string. 
+ * + * @param blockIndex - */ -function resolveAdditionallyAllowedTenantIds(additionallyAllowedTenants) { - if (!additionallyAllowedTenants || additionallyAllowedTenants.length === 0) { - return []; - } - if (additionallyAllowedTenants.includes("*")) { - return ALL_TENANTS; +function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); } - return additionallyAllowedTenants; + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function getIdentityTokenEndpointSuffix(tenantId) { - if (tenantId === "adfs") { - return "oauth2/token"; +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; } else { - return "oauth2/v2.0/token"; + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; } } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** - * Creates a span using the global tracer. - * @internal + * If two strings are equal when compared case insensitive. + * + * @param str1 - + * @param str2 - */ -const tracingClient = coreTracing.createTracingClient({ - namespace: "Microsoft.AAD", - packageName: "@azure/identity", - packageVersion: SDK_VERSION, -}); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const DefaultScopeSuffix = "/.default"; -const imdsHost = "http://169.254.169.254"; -const imdsEndpointPath = "/metadata/identity/oauth2/token"; -const imdsApiVersion = "2018-02-01"; -const azureArcAPIVersion = "2019-11-01"; -const azureFabricVersion = "2019-07-01-preview"; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
+function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} /** - * Most MSIs send requests to the IMDS endpoint, or a similar endpoint. - * These are GET requests that require sending a `resource` parameter on the query. - * This resource can be derived from the scopes received through the getToken call, as long as only one scope is received. - * Multiple scopes assume that the resulting token will have access to multiple resources, which won't be the case. - * - * For that reason, when we encounter multiple scopes, we return undefined. - * It's up to the individual MSI implementations to throw the errors (which helps us provide less generic errors). + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name */ -function mapScopesToResource(scopes) { - let scope = ""; - if (Array.isArray(scopes)) { - if (scopes.length !== 1) { - return; +function getAccountNameFromUrl(url) { + const parsedUrl = new URL(url); + let accountName; + try { + if (parsedUrl.hostname.split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.hostname.split(".")[0]; } - scope = scopes[0]; - } - else if (typeof scopes === "string") { - scope = scopes; + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.pathname.split("/")[1]; + } + else { + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; + } + return accountName; } - if (!scope.endsWith(DefaultScopeSuffix)) { - return scope; + catch (error) { + throw new Error("Unable to extract accountName with provided information."); } - return scope.substr(0, scope.lastIndexOf(DefaultScopeSuffix)); +} +function isIpEndpointStyle(parsedUrl) { + const host = parsedUrl.host; + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || + (Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port))); } /** - * Given a token response, return the expiration timestamp as the number of milliseconds from the Unix epoch. - * @param body - A parsed response body from the authentication endpoint. + * Convert Tags to encoded string. 
+ * + * @param tags - */ -function parseExpirationTimestamp(body) { - if (typeof body.expires_on === "number") { - return body.expires_on * 1000; +function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; } - if (typeof body.expires_on === "string") { - const asNumber = +body.expires_on; - if (!isNaN(asNumber)) { - return asNumber * 1000; - } - const asDate = Date.parse(body.expires_on); - if (!isNaN(asDate)) { - return asDate; + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); } } - if (typeof body.expires_in === "number") { - return Date.now() + body.expires_in * 1000; - } - throw new Error(`Failed to parse token expiration from body. expires_in="${body.expires_in}", expires_on="${body.expires_on}"`); + return tagPairs.join("&"); } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const noCorrelationId = "noCorrelationId"; /** - * @internal + * Convert Tags type to BlobTags. + * + * @param tags - */ -function getIdentityClientAuthorityHost(options) { - // The authorityHost can come from options or from the AZURE_AUTHORITY_HOST environment variable. - let authorityHost = options === null || options === void 0 ? void 0 : options.authorityHost; - // The AZURE_AUTHORITY_HOST environment variable can only be provided in Node.js. - if (coreUtil.isNode) { - authorityHost = authorityHost !== null && authorityHost !== void 0 ? authorityHost : process.env.AZURE_AUTHORITY_HOST; +function toBlobTags(tags) { + if (tags === undefined) { + return undefined; } - // If the authorityHost is not provided, we use the default one from the public cloud: https://login.microsoftonline.com - return authorityHost !== null && authorityHost !== void 0 ? authorityHost : DefaultAuthorityHost; + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; } /** - * The network module used by the Identity credentials. + * Covert BlobTags to Tags type. * - * It allows for credentials to abort any pending request independently of the MSAL flow, - * by calling to the `abortRequests()` method. + * @param tags - + */ +function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. * + * @param textConfiguration - */ -class IdentityClient extends coreClient.ServiceClient { - constructor(options) { - var _a, _b; - const packageDetails = `azsdk-js-identity/${SDK_VERSION}`; - const userAgentPrefix = ((_a = options === null || options === void 0 ? void 0 : options.userAgentOptions) === null || _a === void 0 ? void 0 : _a.userAgentPrefix) - ? 
`${options.userAgentOptions.userAgentPrefix} ${packageDetails}` - : `${packageDetails}`; - const baseUri = getIdentityClientAuthorityHost(options); - if (!baseUri.startsWith("https:")) { - throw new Error("The authorityHost address must use the 'https' protocol."); - } - super(Object.assign(Object.assign({ requestContentType: "application/json; charset=utf-8", retryOptions: { - maxRetries: 3, - } }, options), { userAgentOptions: { - userAgentPrefix, - }, baseUri })); - this.authorityHost = baseUri; - this.abortControllers = new Map(); - this.allowLoggingAccountIdentifiers = (_b = options === null || options === void 0 ? void 0 : options.loggingOptions) === null || _b === void 0 ? void 0 : _b.allowLoggingAccountIdentifiers; - // used for WorkloadIdentity - this.tokenCredentialOptions = Object.assign({}, options); +function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; } - async sendTokenRequest(request) { - logger$n.info(`IdentityClient: sending token request to [${request.url}]`); - const response = await this.sendRequest(request); - if (response.bodyAsText && (response.status === 200 || response.status === 201)) { - const parsedBody = JSON.parse(response.bodyAsText); - if (!parsedBody.access_token) { - return null; - } - this.logIdentifiers(response); - const token = { - accessToken: { - token: parsedBody.access_token, - expiresOnTimestamp: parseExpirationTimestamp(parsedBody), + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, }, - refreshToken: parsedBody.refresh_token, }; - logger$n.info(`IdentityClient: [${request.url}] token acquired, expires on ${token.accessToken.expiresOnTimestamp}`); - return token; - } - else { - const error = new AuthenticationError(response.status, response.bodyAsText); - logger$n.warning(`IdentityClient: authentication error. 
HTTP status: ${response.status}, ${error.errorResponse.errorDescription}`); - throw error; - } - } - async refreshAccessToken(tenantId, clientId, scopes, refreshToken, clientSecret, options = {}) { - if (refreshToken === undefined) { - return null; - } - logger$n.info(`IdentityClient: refreshing access token with client ID: ${clientId}, scopes: ${scopes} started`); - const refreshParams = { - grant_type: "refresh_token", - client_id: clientId, - refresh_token: refreshToken, - scope: scopes, - }; - if (clientSecret !== undefined) { - refreshParams.client_secret = clientSecret; - } - const query = new URLSearchParams(refreshParams); - return tracingClient.withSpan("IdentityClient.refreshAccessToken", options, async (updatedOptions) => { - try { - const urlSuffix = getIdentityTokenEndpointSuffix(tenantId); - const request = coreRestPipeline.createPipelineRequest({ - url: `${this.authorityHost}/${tenantId}/${urlSuffix}`, - method: "POST", - body: query.toString(), - abortSignal: options.abortSignal, - headers: coreRestPipeline.createHttpHeaders({ - Accept: "application/json", - "Content-Type": "application/x-www-form-urlencoded", - }), - tracingOptions: updatedOptions.tracingOptions, - }); - const response = await this.sendTokenRequest(request); - logger$n.info(`IdentityClient: refreshed token for client ID: ${clientId}`); - return response; - } - catch (err) { - if (err.name === AuthenticationErrorName && - err.errorResponse.error === "interaction_required") { - // It's likely that the refresh token has expired, so - // return null so that the credential implementation will - // initiate the authentication flow again. - logger$n.info(`IdentityClient: interaction required for client ID: ${clientId}`); - return null; - } - else { - logger$n.warning(`IdentityClient: failed refreshing token for client ID: ${clientId}: ${err}`); - throw err; - } - } - }); - } - // Here is a custom layer that allows us to abort requests that go through MSAL, - // since MSAL doesn't allow us to pass options all the way through. - generateAbortSignal(correlationId) { - const controller = new abortController.AbortController(); - const controllers = this.abortControllers.get(correlationId) || []; - controllers.push(controller); - this.abortControllers.set(correlationId, controllers); - const existingOnAbort = controller.signal.onabort; - controller.signal.onabort = (...params) => { - this.abortControllers.set(correlationId, undefined); - if (existingOnAbort) { - existingOnAbort(...params); - } - }; - return controller.signal; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); } - abortRequests(correlationId) { - const key = correlationId || noCorrelationId; - const controllers = [ - ...(this.abortControllers.get(key) || []), - // MSAL passes no correlation ID to the get requests... 
- ...(this.abortControllers.get(noCorrelationId) || []), - ]; - if (!controllers.length) { - return; - } - for (const controller of controllers) { - controller.abort(); - } - this.abortControllers.set(key, undefined); +} +function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; } - getCorrelationId(options) { - var _a; - const parameter = (_a = options === null || options === void 0 ? void 0 : options.body) === null || _a === void 0 ? void 0 : _a.split("&").map((part) => part.split("=")).find(([key]) => key === "client-request-id"); - return parameter && parameter.length ? parameter[1] || noCorrelationId : noCorrelationId; + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. + return undefined; } - // The MSAL network module methods follow - async sendGetRequestAsync(url, options) { - const request = coreRestPipeline.createPipelineRequest({ - url, - method: "GET", - body: options === null || options === void 0 ? void 0 : options.body, - headers: coreRestPipeline.createHttpHeaders(options === null || options === void 0 ? void 0 : options.headers), - abortSignal: this.generateAbortSignal(noCorrelationId), - }); - const response = await this.sendRequest(request); - this.logIdentifiers(response); - return { - body: response.bodyAsText ? JSON.parse(response.bodyAsText) : undefined, - headers: response.headers.toJSON(), - status: response.status, + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } } - async sendPostRequestAsync(url, options) { - const request = coreRestPipeline.createPipelineRequest({ - url, - method: "POST", - body: options === null || options === void 0 ? void 0 : options.body, - headers: coreRestPipeline.createHttpHeaders(options === null || options === void 0 ? void 0 : options.headers), - // MSAL doesn't send the correlation ID on the get requests. - abortSignal: this.generateAbortSignal(this.getCorrelationId(options)), - }); - const response = await this.sendRequest(request); - this.logIdentifiers(response); - return { - body: response.bodyAsText ? JSON.parse(response.bodyAsText) : undefined, - headers: response.headers.toJSON(), - status: response.status, - }; + return orProperties; +} +function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); } - /** - * - * @internal - */ - getTokenCredentialOptions() { - return this.tokenCredentialOptions; + else { + return name.content; } - /** - * If allowLoggingAccountIdentifiers was set on the constructor options - * we try to log the account identifiers by parsing the received access token. - * - * The account identifiers we try to log are: - * - `appid`: The application or Client Identifier. 
- * - `upn`: User Principal Name. - * - It might not be available in some authentication scenarios. - * - If it's not available, we put a placeholder: "No User Principal Name available". - * - `tid`: Tenant Identifier. - * - `oid`: Object Identifier of the authenticated user. - */ - logIdentifiers(response) { - if (!this.allowLoggingAccountIdentifiers || !response.bodyAsText) { - return; - } - const unavailableUpn = "No User Principal Name available"; - try { - const parsed = response.parsedBody || JSON.parse(response.bodyAsText); - const accessToken = parsed.access_token; - if (!accessToken) { - // Without an access token allowLoggingAccountIdentifiers isn't useful. - return; - } - const base64Metadata = accessToken.split(".")[1]; - const { appid, upn, tid, oid } = JSON.parse(Buffer.from(base64Metadata, "base64").toString("utf8")); - logger$n.info(`[Authenticated account] Client ID: ${appid}. Tenant ID: ${tid}. User Principal Name: ${upn || unavailableUpn}. Object ID (user): ${oid}`); +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; } - catch (e) { - logger$n.warning("allowLoggingAccountIdentifiers was set, but we couldn't log the account information. Error:", e.message); + else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; } } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Helps specify a regional authority, or "AutoDiscoverRegion" to auto-detect the region. 
- */ -var RegionalAuthority; -(function (RegionalAuthority) { - /** Instructs MSAL to attempt to discover the region */ - RegionalAuthority["AutoDiscoverRegion"] = "AutoDiscoverRegion"; - /** Uses the {@link RegionalAuthority} for the Azure 'westus' region. */ - RegionalAuthority["USWest"] = "westus"; - /** Uses the {@link RegionalAuthority} for the Azure 'westus2' region. */ - RegionalAuthority["USWest2"] = "westus2"; - /** Uses the {@link RegionalAuthority} for the Azure 'centralus' region. */ - RegionalAuthority["USCentral"] = "centralus"; - /** Uses the {@link RegionalAuthority} for the Azure 'eastus' region. */ - RegionalAuthority["USEast"] = "eastus"; - /** Uses the {@link RegionalAuthority} for the Azure 'eastus2' region. */ - RegionalAuthority["USEast2"] = "eastus2"; - /** Uses the {@link RegionalAuthority} for the Azure 'northcentralus' region. */ - RegionalAuthority["USNorthCentral"] = "northcentralus"; - /** Uses the {@link RegionalAuthority} for the Azure 'southcentralus' region. */ - RegionalAuthority["USSouthCentral"] = "southcentralus"; - /** Uses the {@link RegionalAuthority} for the Azure 'westcentralus' region. */ - RegionalAuthority["USWestCentral"] = "westcentralus"; - /** Uses the {@link RegionalAuthority} for the Azure 'canadacentral' region. */ - RegionalAuthority["CanadaCentral"] = "canadacentral"; - /** Uses the {@link RegionalAuthority} for the Azure 'canadaeast' region. */ - RegionalAuthority["CanadaEast"] = "canadaeast"; - /** Uses the {@link RegionalAuthority} for the Azure 'brazilsouth' region. */ - RegionalAuthority["BrazilSouth"] = "brazilsouth"; - /** Uses the {@link RegionalAuthority} for the Azure 'northeurope' region. */ - RegionalAuthority["EuropeNorth"] = "northeurope"; - /** Uses the {@link RegionalAuthority} for the Azure 'westeurope' region. */ - RegionalAuthority["EuropeWest"] = "westeurope"; - /** Uses the {@link RegionalAuthority} for the Azure 'uksouth' region. */ - RegionalAuthority["UKSouth"] = "uksouth"; - /** Uses the {@link RegionalAuthority} for the Azure 'ukwest' region. */ - RegionalAuthority["UKWest"] = "ukwest"; - /** Uses the {@link RegionalAuthority} for the Azure 'francecentral' region. */ - RegionalAuthority["FranceCentral"] = "francecentral"; - /** Uses the {@link RegionalAuthority} for the Azure 'francesouth' region. */ - RegionalAuthority["FranceSouth"] = "francesouth"; - /** Uses the {@link RegionalAuthority} for the Azure 'switzerlandnorth' region. */ - RegionalAuthority["SwitzerlandNorth"] = "switzerlandnorth"; - /** Uses the {@link RegionalAuthority} for the Azure 'switzerlandwest' region. */ - RegionalAuthority["SwitzerlandWest"] = "switzerlandwest"; - /** Uses the {@link RegionalAuthority} for the Azure 'germanynorth' region. */ - RegionalAuthority["GermanyNorth"] = "germanynorth"; - /** Uses the {@link RegionalAuthority} for the Azure 'germanywestcentral' region. */ - RegionalAuthority["GermanyWestCentral"] = "germanywestcentral"; - /** Uses the {@link RegionalAuthority} for the Azure 'norwaywest' region. */ - RegionalAuthority["NorwayWest"] = "norwaywest"; - /** Uses the {@link RegionalAuthority} for the Azure 'norwayeast' region. */ - RegionalAuthority["NorwayEast"] = "norwayeast"; - /** Uses the {@link RegionalAuthority} for the Azure 'eastasia' region. */ - RegionalAuthority["AsiaEast"] = "eastasia"; - /** Uses the {@link RegionalAuthority} for the Azure 'southeastasia' region. */ - RegionalAuthority["AsiaSouthEast"] = "southeastasia"; - /** Uses the {@link RegionalAuthority} for the Azure 'japaneast' region. 
*/ - RegionalAuthority["JapanEast"] = "japaneast"; - /** Uses the {@link RegionalAuthority} for the Azure 'japanwest' region. */ - RegionalAuthority["JapanWest"] = "japanwest"; - /** Uses the {@link RegionalAuthority} for the Azure 'australiaeast' region. */ - RegionalAuthority["AustraliaEast"] = "australiaeast"; - /** Uses the {@link RegionalAuthority} for the Azure 'australiasoutheast' region. */ - RegionalAuthority["AustraliaSouthEast"] = "australiasoutheast"; - /** Uses the {@link RegionalAuthority} for the Azure 'australiacentral' region. */ - RegionalAuthority["AustraliaCentral"] = "australiacentral"; - /** Uses the {@link RegionalAuthority} for the Azure 'australiacentral2' region. */ - RegionalAuthority["AustraliaCentral2"] = "australiacentral2"; - /** Uses the {@link RegionalAuthority} for the Azure 'centralindia' region. */ - RegionalAuthority["IndiaCentral"] = "centralindia"; - /** Uses the {@link RegionalAuthority} for the Azure 'southindia' region. */ - RegionalAuthority["IndiaSouth"] = "southindia"; - /** Uses the {@link RegionalAuthority} for the Azure 'westindia' region. */ - RegionalAuthority["IndiaWest"] = "westindia"; - /** Uses the {@link RegionalAuthority} for the Azure 'koreasouth' region. */ - RegionalAuthority["KoreaSouth"] = "koreasouth"; - /** Uses the {@link RegionalAuthority} for the Azure 'koreacentral' region. */ - RegionalAuthority["KoreaCentral"] = "koreacentral"; - /** Uses the {@link RegionalAuthority} for the Azure 'uaecentral' region. */ - RegionalAuthority["UAECentral"] = "uaecentral"; - /** Uses the {@link RegionalAuthority} for the Azure 'uaenorth' region. */ - RegionalAuthority["UAENorth"] = "uaenorth"; - /** Uses the {@link RegionalAuthority} for the Azure 'southafricanorth' region. */ - RegionalAuthority["SouthAfricaNorth"] = "southafricanorth"; - /** Uses the {@link RegionalAuthority} for the Azure 'southafricawest' region. */ - RegionalAuthority["SouthAfricaWest"] = "southafricawest"; - /** Uses the {@link RegionalAuthority} for the Azure 'chinanorth' region. */ - RegionalAuthority["ChinaNorth"] = "chinanorth"; - /** Uses the {@link RegionalAuthority} for the Azure 'chinaeast' region. */ - RegionalAuthority["ChinaEast"] = "chinaeast"; - /** Uses the {@link RegionalAuthority} for the Azure 'chinanorth2' region. */ - RegionalAuthority["ChinaNorth2"] = "chinanorth2"; - /** Uses the {@link RegionalAuthority} for the Azure 'chinaeast2' region. */ - RegionalAuthority["ChinaEast2"] = "chinaeast2"; - /** Uses the {@link RegionalAuthority} for the Azure 'germanycentral' region. */ - RegionalAuthority["GermanyCentral"] = "germanycentral"; - /** Uses the {@link RegionalAuthority} for the Azure 'germanynortheast' region. */ - RegionalAuthority["GermanyNorthEast"] = "germanynortheast"; - /** Uses the {@link RegionalAuthority} for the Azure 'usgovvirginia' region. */ - RegionalAuthority["GovernmentUSVirginia"] = "usgovvirginia"; - /** Uses the {@link RegionalAuthority} for the Azure 'usgoviowa' region. */ - RegionalAuthority["GovernmentUSIowa"] = "usgoviowa"; - /** Uses the {@link RegionalAuthority} for the Azure 'usgovarizona' region. */ - RegionalAuthority["GovernmentUSArizona"] = "usgovarizona"; - /** Uses the {@link RegionalAuthority} for the Azure 'usgovtexas' region. */ - RegionalAuthority["GovernmentUSTexas"] = "usgovtexas"; - /** Uses the {@link RegionalAuthority} for the Azure 'usdodeast' region. */ - RegionalAuthority["GovernmentUSDodEast"] = "usdodeast"; - /** Uses the {@link RegionalAuthority} for the Azure 'usdodcentral' region. 
*/ - RegionalAuthority["GovernmentUSDodCentral"] = "usdodcentral"; -})(RegionalAuthority || (RegionalAuthority = {})); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The current persistence provider, undefined by default. - * @internal - */ -let persistenceProvider = undefined; /** - * An object that allows setting the persistence provider. - * @internal + * Escape the blobName but keep path separator ('/'). */ -const msalNodeFlowCacheControl = { - setPersistence(pluginProvider) { - persistenceProvider = pluginProvider; - }, -}; +function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); +} /** - * The current native broker provider, undefined by default. - * @internal + * A typesafe helper for ensuring that a given response object has + * the original _response attached. + * @param response - A response object from calling a client operation + * @returns The same object, but with known _response property */ -let nativeBrokerInfo = undefined; -function hasNativeBroker() { - return nativeBrokerInfo !== undefined; +function assertResponse(response) { + if (`_response` in response) { + return response; + } + throw new TypeError(`Unexpected response object ${response}`); } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * An object that allows setting the native broker provider. - * @internal + * RetryPolicy types. */ -const msalNodeFlowNativeBrokerControl = { - setNativeBroker(broker) { - nativeBrokerInfo = { - broker, - }; - }, +exports.StorageRetryPolicyType = void 0; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS$1 = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy }; +const RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted."); /** - * MSAL partial base client for Node.js. - * - * It completes the input configuration with some default values. - * It also provides with utility protected methods that can be used from any of the clients, - * which includes handlers for successful responses and errors. - * - * @internal + * Retry policy with exponential retry and linear retry implemented. */ -class MsalNode extends MsalBaseUtilities { - constructor(options) { - var _a, _b, _c, _d, _e, _f, _g; - super(options); - this.app = {}; - this.caeApp = {}; - this.requiresConfidential = false; - this.msalConfig = this.defaultNodeMsalConfig(options); - this.tenantId = resolveTenantId(options.logger, options.tenantId, options.clientId); - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds((_a = options === null || options === void 0 ? void 0 : options.tokenCredentialOptions) === null || _a === void 0 ? void 0 : _a.additionallyAllowedTenants); - this.clientId = this.msalConfig.auth.clientId; - if (options === null || options === void 0 ? 
void 0 : options.getAssertion) { - this.getAssertion = options.getAssertion; - } - this.enableBroker = (_b = options === null || options === void 0 ? void 0 : options.brokerOptions) === null || _b === void 0 ? void 0 : _b.enabled; - this.enableMsaPassthrough = (_c = options === null || options === void 0 ? void 0 : options.brokerOptions) === null || _c === void 0 ? void 0 : _c.legacyEnableMsaPassthrough; - this.parentWindowHandle = (_d = options.brokerOptions) === null || _d === void 0 ? void 0 : _d.parentWindowHandle; - // If persistence has been configured - if (persistenceProvider !== undefined && ((_e = options.tokenCachePersistenceOptions) === null || _e === void 0 ? void 0 : _e.enabled)) { - const nonCaeOptions = Object.assign({ name: `${options.tokenCachePersistenceOptions.name}.${CACHE_NON_CAE_SUFFIX}` }, options.tokenCachePersistenceOptions); - const caeOptions = Object.assign({ name: `${options.tokenCachePersistenceOptions.name}.${CACHE_CAE_SUFFIX}` }, options.tokenCachePersistenceOptions); - this.createCachePlugin = () => persistenceProvider(nonCaeOptions); - this.createCachePluginCae = () => persistenceProvider(caeOptions); - } - else if ((_f = options.tokenCachePersistenceOptions) === null || _f === void 0 ? void 0 : _f.enabled) { - throw new Error([ - "Persistent token caching was requested, but no persistence provider was configured.", - "You must install the identity-cache-persistence plugin package (`npm install --save @azure/identity-cache-persistence`)", - "and enable it by importing `useIdentityPlugin` from `@azure/identity` and calling", - "`useIdentityPlugin(cachePersistencePlugin)` before using `tokenCachePersistenceOptions`.", - ].join(" ")); - } - // If broker has not been configured - if (!hasNativeBroker() && this.enableBroker) { - throw new Error([ - "Broker for WAM was requested to be enabled, but no native broker was configured.", - "You must install the identity-broker plugin package (`npm install --save @azure/identity-broker`)", - "and enable it by importing `useIdentityPlugin` from `@azure/identity` and calling", - "`useIdentityPlugin(createNativeBrokerPlugin())` before using `enableBroker`.", - ].join(" ")); - } - this.azureRegion = (_g = options.regionalAuthority) !== null && _g !== void 0 ? _g : process.env.AZURE_REGIONAL_AUTHORITY_NAME; - if (this.azureRegion === RegionalAuthority.AutoDiscoverRegion) { - this.azureRegion = "AUTO_DISCOVER"; - } - } +class StorageRetryPolicy extends BaseRequestPolicy { /** - * Generates a MSAL configuration that generally works for Node.js + * Creates an instance of RetryPolicy. 
+ * + * @param nextPolicy - + * @param options - + * @param retryOptions - */ - defaultNodeMsalConfig(options) { - var _a; - const clientId = options.clientId || DeveloperSignOnClientId; - const tenantId = resolveTenantId(options.logger, options.tenantId, options.clientId); - this.authorityHost = options.authorityHost || process.env.AZURE_AUTHORITY_HOST; - const authority = getAuthority(tenantId, this.authorityHost); - this.identityClient = new IdentityClient(Object.assign(Object.assign({}, options.tokenCredentialOptions), { authorityHost: authority, loggingOptions: options.loggingOptions })); - const clientCapabilities = []; - return { - auth: { - clientId, - authority, - knownAuthorities: getKnownAuthorities(tenantId, authority, options.disableInstanceDiscovery), - clientCapabilities, - }, - // Cache is defined in this.prepare(); - system: { - networkClient: this.identityClient, - loggerOptions: { - loggerCallback: defaultLoggerCallback(options.logger), - logLevel: getMSALLogLevel(logger$o.getLogLevel()), - piiLoggingEnabled: (_a = options.loggingOptions) === null || _a === void 0 ? void 0 : _a.enableUnsafeSupportLogging, - }, - }, + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS$1.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS$1.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS$1.secondaryHost, }; } - getApp(appType, enableCae) { - const app = enableCae ? this.caeApp : this.app; - if (appType === "publicFirst") { - return (app.public || app.confidential); - } - else if (appType === "confidentialFirst") { - return (app.confidential || app.public); - } - else if (appType === "confidential") { - return app.confidential; - } - else { - return app.public; - } - } - /** - * Prepares the MSAL applications. - */ - async init(options) { - if (options === null || options === void 0 ? void 0 : options.abortSignal) { - options.abortSignal.addEventListener("abort", () => { - // This will abort any pending request in the IdentityClient, - // based on the received or generated correlationId - this.identityClient.abortRequests(options.correlationId); - }); - } - const app = (options === null || options === void 0 ? void 0 : options.enableCae) ? this.caeApp : this.app; - if (options === null || options === void 0 ? void 0 : options.enableCae) { - this.msalConfig.auth.clientCapabilities = ["cp1"]; - } - if (app.public || app.confidential) { - return; - } - if ((options === null || options === void 0 ? 
void 0 : options.enableCae) && this.createCachePluginCae !== undefined) { - this.msalConfig.cache = { - cachePlugin: await this.createCachePluginCae(), - }; - } - if (this.createCachePlugin !== undefined) { - this.msalConfig.cache = { - cachePlugin: await this.createCachePlugin(), - }; - } - if (hasNativeBroker() && this.enableBroker) { - this.msalConfig.broker = { - nativeBrokerPlugin: nativeBrokerInfo.broker, - }; - if (!this.parentWindowHandle) { - // error should have been thrown from within the constructor of InteractiveBrowserCredential - this.logger.warning("Parent window handle is not specified for the broker. This may cause unexpected behavior. Please provide the parentWindowHandle."); - } - } - if (options === null || options === void 0 ? void 0 : options.enableCae) { - this.caeApp.public = new msalCommon__namespace.PublicClientApplication(this.msalConfig); - } - else { - this.app.public = new msalCommon__namespace.PublicClientApplication(this.msalConfig); - } - if (this.getAssertion) { - this.msalConfig.auth.clientAssertion = await this.getAssertion(); - } - // The confidential client requires either a secret, assertion or certificate. - if (this.msalConfig.auth.clientSecret || - this.msalConfig.auth.clientAssertion || - this.msalConfig.auth.clientCertificate) { - if (options === null || options === void 0 ? void 0 : options.enableCae) { - this.caeApp.confidential = new msalCommon__namespace.ConfidentialClientApplication(this.msalConfig); - } - else { - this.app.confidential = new msalCommon__namespace.ConfidentialClientApplication(this.msalConfig); - } - } - else { - if (this.requiresConfidential) { - throw new Error("Unable to generate the MSAL confidential client. Missing either the client's secret, certificate or assertion."); - } - } - } /** - * Allows the cancellation of a MSAL request. + * Sends request. + * + * @param request - */ - withCancellation(promise, abortSignal, onCancel) { - return new Promise((resolve, reject) => { - promise - .then((msalToken) => { - return resolve(msalToken); - }) - .catch(reject); - if (abortSignal) { - abortSignal.addEventListener("abort", () => { - onCancel === null || onCancel === void 0 ? void 0 : onCancel(); - }); - } - }); + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); } /** - * Returns the existing account, attempts to load the account from MSAL. + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. */ - async getActiveAccount(enableCae = false) { - if (this.account) { - return this.account; + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); } - const cache = this.getApp("confidentialFirst", enableCae).getTokenCache(); - const accountsByTenant = await (cache === null || cache === void 0 ? 
void 0 : cache.getAllAccounts()); - if (!accountsByTenant) { - return; + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); } - if (accountsByTenant.length === 1) { - this.account = msalToPublic(this.clientId, accountsByTenant[0]); + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); } - else { - this.logger - .info(`More than one account was found authenticated for this Client ID and Tenant ID. -However, no "authenticationRecord" has been provided for this credential, -therefore we're unable to pick between these accounts. -A new login attempt will be requested, to ensure the correct account is picked. -To work with multiple accounts for the same Client ID and Tenant ID, please provide an "authenticationRecord" when initializing a credential to prevent this from happening.`); - return; + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } } - return this.account; + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); } /** - * Attempts to retrieve a token from cache. + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - */ - async getTokenSilent(scopes, options) { - var _a, _b, _c; - await this.getActiveAccount(options === null || options === void 0 ? void 0 : options.enableCae); - if (!this.account) { - throw new AuthenticationRequiredError({ - scopes, - getTokenOptions: options, - message: "Silent authentication failed. We couldn't retrieve an active account from the cache.", - }); + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; } - const silentRequest = { - // To be able to re-use the account, the Token Cache must also have been provided. - account: publicToMsal(this.account), - correlationId: options === null || options === void 0 ? void 0 : options.correlationId, - scopes, - authority: options === null || options === void 0 ? void 0 : options.authority, - claims: options === null || options === void 0 ? 
void 0 : options.claims, - }; - if (hasNativeBroker() && this.enableBroker) { - if (!silentRequest.tokenQueryParameters) { - silentRequest.tokenQueryParameters = {}; + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } } - if (!this.parentWindowHandle) { - // error should have been thrown from within the constructor of InteractiveBrowserCredential - this.logger.warning("Parent window handle is not specified for the broker. This may cause unexpected behavior. Please provide the parentWindowHandle."); + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; } - if (this.enableMsaPassthrough) { - silentRequest.tokenQueryParameters["msal_request_type"] = "consumer_passthrough"; + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; } } - try { - this.logger.info("Attempting to acquire token silently"); - /** - * The following code to retrieve all accounts is done as a workaround in an attempt to force the - * refresh of the token cache with the token and the account passed in through the - * `authenticationRecord` parameter. See issue - https://github.com/Azure/azure-sdk-for-js/issues/24349#issuecomment-1496715651 - * This workaround serves as a workaround for silent authentication not happening when authenticationRecord is passed. - */ - await ((_a = this.getApp("publicFirst", options === null || options === void 0 ? void 0 : options.enableCae)) === null || _a === void 0 ? void 0 : _a.getTokenCache().getAllAccounts()); - const response = (_c = (await ((_b = this.getApp("confidential", options === null || options === void 0 ? void 0 : options.enableCae)) === null || _b === void 0 ? void 0 : _b.acquireTokenSilent(silentRequest)))) !== null && _c !== void 0 ? _c : (await this.getApp("public", options === null || options === void 0 ? void 0 : options.enableCae).acquireTokenSilent(silentRequest)); - return this.handleResult(scopes, this.clientId, response || undefined); - } - catch (err) { - throw this.handleError(scopes, err, options); + // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now. + // if (response) { + // // Retry select Copy Source Error Codes. 
+ // if (response?.status >= 400) { + // const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode); + // if (copySourceError !== undefined) { + // switch (copySourceError) { + // case "InternalError": + // case "OperationTimedOut": + // case "ServerBusy": + // return true; + // } + // } + // } + // } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; } + return false; } /** - * Wrapper around each MSAL flow get token operation: doGetToken. - * If disableAutomaticAuthentication is sent through the constructor, it will prevent MSAL from requesting the user input. + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - */ - async getToken(scopes, options = {}) { - const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds) || - this.tenantId; - options.authority = getAuthority(tenantId, this.authorityHost); - options.correlationId = (options === null || options === void 0 ? void 0 : options.correlationId) || this.generateUuid(); - await this.init(options); - try { - // MSAL now caches tokens based on their claims, - // so now one has to keep track fo claims in order to retrieve the newer tokens from acquireTokenSilent - // This update happened on PR: https://github.com/AzureAD/microsoft-authentication-library-for-js/pull/4533 - const optionsClaims = options.claims; - if (optionsClaims) { - this.cachedClaims = optionsClaims; - } - if (this.cachedClaims && !optionsClaims) { - options.claims = this.cachedClaims; + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; } - // We don't return the promise since we want to catch errors right here. - return await this.getTokenSilent(scopes, options); } - catch (err) { - if (err.name !== "AuthenticationRequiredError") { - throw err; - } - if (options === null || options === void 0 ? void 0 : options.disableAutomaticAuthentication) { - throw new AuthenticationRequiredError({ - scopes, - getTokenOptions: options, - message: "Automatic authentication has been disabled. You may call the authentication() method.", - }); - } - this.logger.info(`Silent authentication failed, falling back to interactive method.`); - return this.doGetToken(scopes, options); + else { + delayTimeInMs = Math.random() * 1000; } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR$1); } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-const CommonTenantId = "common"; -const AzureAccountClientId = "aebc6443-996d-45c2-90f0-388ff96faa56"; // VSC: 'aebc6443-996d-45c2-90f0-388ff96faa56' -const logger$m = credentialLogger("VisualStudioCodeCredential"); -let findCredentials = undefined; -const vsCodeCredentialControl = { - setVsCodeCredentialFinder(finder) { - findCredentials = finder; - }, -}; -// Map of unsupported Tenant IDs and the errors we will be throwing. -const unsupportedTenantIds = { - adfs: "The VisualStudioCodeCredential does not support authentication with ADFS tenants.", -}; -function checkUnsupportedTenant(tenantId) { - // If the Tenant ID isn't supported, we throw. - const unsupportedTenantError = unsupportedTenantIds[tenantId]; - if (unsupportedTenantError) { - throw new CredentialUnavailableError(unsupportedTenantError); - } -} -const mapVSCodeAuthorityHosts = { - AzureCloud: exports.AzureAuthorityHosts.AzurePublicCloud, - AzureChina: exports.AzureAuthorityHosts.AzureChina, - AzureGermanCloud: exports.AzureAuthorityHosts.AzureGermany, - AzureUSGovernment: exports.AzureAuthorityHosts.AzureGovernment, -}; +// Licensed under the MIT License. /** - * Attempts to load a specific property from the VSCode configurations of the current OS. - * If it fails at any point, returns undefined. + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. */ -function getPropertyFromVSCode(property) { - const settingsPath = ["User", "settings.json"]; - // Eventually we can add more folders for more versions of VSCode. - const vsCodeFolder = "Code"; - const homedir = os.homedir(); - function loadProperty(...pathSegments) { - const fullPath = path.join(...pathSegments, vsCodeFolder, ...settingsPath); - const settings = JSON.parse(fs.readFileSync(fullPath, { encoding: "utf8" })); - return settings[property]; - } - try { - let appData; - switch (process.platform) { - case "win32": - appData = process.env.APPDATA; - return appData ? loadProperty(appData) : undefined; - case "darwin": - return loadProperty(homedir, "Library", "Application Support"); - case "linux": - return loadProperty(homedir, ".config"); - default: - return; - } +class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; } - catch (e) { - logger$m.info(`Failed to load the Visual Studio Code configuration file. Error: ${e.message}`); - return; + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); } } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Connects to Azure using the credential provided by the VSCode extension 'Azure Account'. - * Once the user has logged in via the extension, this credential can share the same refresh token - * that is cached by the extension. - * - * It's a [known issue](https://github.com/Azure/azure-sdk-for-js/issues/20500) that this credential doesn't - * work with [Azure Account extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode.azure-account) - * versions newer than **0.9.11**. A long-term fix to this problem is in progress. In the meantime, consider - * authenticating with {@link AzureCliCredential}. + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. 
*/ -class VisualStudioCodeCredential { +class CredentialPolicy extends BaseRequestPolicy { /** - * Creates an instance of VisualStudioCodeCredential to use for automatically authenticating via VSCode. + * Sends out request. * - * **Note**: `VisualStudioCodeCredential` is provided by a plugin package: - * `@azure/identity-vscode`. If this package is not installed and registered - * using the plugin API (`useIdentityPlugin`), then authentication using - * `VisualStudioCodeCredential` will not be available. + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. * - * @param options - Options for configuring the client which makes the authentication request. + * @param request - */ - constructor(options) { - // We want to make sure we use the one assigned by the user on the VSCode settings. - // Or just `AzureCloud` by default. - this.cloudName = (getPropertyFromVSCode("azure.cloud") || "AzureCloud"); - // Picking an authority host based on the cloud name. - const authorityHost = mapVSCodeAuthorityHosts[this.cloudName]; - this.identityClient = new IdentityClient(Object.assign({ authorityHost }, options)); - if (options && options.tenantId) { - checkTenantId(logger$m, options.tenantId); - this.tenantId = options.tenantId; + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/* + * We need to imitate .Net culture-aware sorting, which is used in storage service. + * Below tables contain sort-keys for en-US culture. 
+ */ +const table_lv0 = new Uint32Array([ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71c, 0x0, 0x71f, 0x721, + 0x723, 0x725, 0x0, 0x0, 0x0, 0x72d, 0x803, 0x0, 0x0, 0x733, 0x0, 0xd03, 0xd1a, 0xd1c, 0xd1e, + 0xd20, 0xd22, 0xd24, 0xd26, 0xd28, 0xd2a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe02, 0xe09, 0xe0a, + 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, 0xe7c, 0xe7e, 0xe89, + 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x0, 0x0, 0x743, 0x744, 0x748, + 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, + 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x74c, + 0x0, 0x750, 0x0, +]); +const table_lv2 = new Uint32Array([ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, + 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, + 0x12, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +]); +const table_lv4 = new Uint32Array([ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x8012, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8212, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +]); +function compareHeader(lhs, rhs) { + if (isLessThan(lhs, rhs)) + return -1; + return 1; +} +function isLessThan(lhs, rhs) { + const tables = [table_lv0, table_lv2, table_lv4]; + let curr_level = 0; + let i = 0; + let j = 0; + while (curr_level < tables.length) { + if (curr_level === tables.length - 1 && i !== j) { + return i > j; + } + const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 0x1; + const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 0x1; + if (weight1 === 0x1 && weight2 === 0x1) { + i = 0; + j = 0; + ++curr_level; + } + else if (weight1 === weight2) { + ++i; + ++j; + } + else if (weight1 === 0) { + ++i; + } + else if (weight2 === 0) { + ++j; } else { - this.tenantId = CommonTenantId; + return weight1 < weight2; } - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - checkUnsupportedTenant(this.tenantId); } + return false; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. 
+ */ +class StorageSharedKeyCredentialPolicy extends CredentialPolicy { /** - * Runs preparations for any further getToken request. + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - */ - async prepare() { - // Attempts to load the tenant from the VSCode configuration file. - const settingsTenant = getPropertyFromVSCode("azure.tenant"); - if (settingsTenant) { - this.tenantId = settingsTenant; - } - checkUnsupportedTenant(this.tenantId); + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; } /** - * Runs preparations for any further getToken, but only once. + * Signs request. + * + * @param request - */ - prepareOnce() { - if (!this.preparePromise) { - this.preparePromise = this.prepare(); + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } - return this.preparePromise; + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; } /** - * Returns the token found by searching VSCode's authentication cache or - * returns null if no token could be found. + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * `TokenCredential` implementation might make. 
+ * @param request - + * @param headerName - */ - async getToken(scopes, options) { - var _a, _b; - await this.prepareOnce(); - const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds, logger$m) || this.tenantId; - if (findCredentials === undefined) { - throw new CredentialUnavailableError([ - "No implementation of `VisualStudioCodeCredential` is available.", - "You must install the identity-vscode plugin package (`npm install --save-dev @azure/identity-vscode`)", - "and enable it by importing `useIdentityPlugin` from `@azure/identity` and calling", - "`useIdentityPlugin(vsCodePlugin)` before creating a `VisualStudioCodeCredential`.", - "To troubleshoot, visit https://aka.ms/azsdk/js/identity/vscodecredential/troubleshoot.", - ].join(" ")); - } - let scopeString = typeof scopes === "string" ? scopes : scopes.join(" "); - // Check to make sure the scope we get back is a valid scope - if (!scopeString.match(/^[0-9a-zA-Z-.:/]+$/)) { - const error = new Error("Invalid scope was specified by the user or calling client"); - logger$m.getToken.info(formatError(scopes, error)); - throw error; + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; } - if (scopeString.indexOf("offline_access") < 0) { - scopeString += " offline_access"; + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; } - // findCredentials returns an array similar to: - // [ - // { - // account: "", - // password: "", - // }, - // /* ... */ - // ] - const credentials = await findCredentials(); - // If we can't find the credential based on the name, we'll pick the first one available. - const { password: refreshToken } = (_b = (_a = credentials.find(({ account }) => account === this.cloudName)) !== null && _a !== void 0 ? _a : credentials[0]) !== null && _b !== void 0 ? _b : {}; - if (refreshToken) { - const tokenResponse = await this.identityClient.refreshAccessToken(tenantId, AzureAccountClientId, scopeString, refreshToken, undefined); - if (tokenResponse) { - logger$m.getToken.info(formatSuccess(scopes)); - return tokenResponse.accessToken; + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; } - else { - const error = new CredentialUnavailableError("Could not retrieve the token associated with Visual Studio Code. Have you connected using the 'Azure Account' extension recently? To troubleshoot, visit https://aka.ms/azsdk/js/identity/vscodecredential/troubleshoot."); - logger$m.getToken.info(formatError(scopes, error)); - throw error; + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } - else { - const error = new CredentialUnavailableError("Could not retrieve the token associated with Visual Studio Code. Did you connect using the 'Azure Account' extension? To troubleshoot, visit https://aka.ms/azsdk/js/identity/vscodecredential/troubleshoot."); - logger$m.getToken.info(formatError(scopes, error)); - throw error; - } + return canonicalizedResourceString; } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The context passed to an Identity plugin. This contains objects that - * plugins can use to set backend implementations. - * @internal - */ -const pluginContext = { - cachePluginControl: msalNodeFlowCacheControl, - nativeBrokerPluginControl: msalNodeFlowNativeBrokerControl, - vsCodeCredentialControl: vsCodeCredentialControl, -}; +// Licensed under the MIT License. /** - * Extend Azure Identity with additional functionality. Pass a plugin from - * a plugin package, such as: - * - * - `@azure/identity-cache-persistence`: provides persistent token caching - * - `@azure/identity-vscode`: provides the dependencies of - * `VisualStudioCodeCredential` and enables it - * - * Example: - * - * ```javascript - * import { cachePersistencePlugin } from "@azure/identity-cache-persistence"; - * - * import { useIdentityPlugin, DefaultAzureCredential } from "@azure/identity"; - * useIdentityPlugin(cachePersistencePlugin); - * - * // The plugin has the capability to extend `DefaultAzureCredential` and to - * // add middleware to the underlying credentials, such as persistence. 
- * const credential = new DefaultAzureCredential({ - * tokenCachePersistenceOptions: { - * enabled: true - * } - * }); - * ``` - * - * @param plugin - the plugin to register + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. */ -function useIdentityPlugin(plugin) { - plugin(pluginContext); +class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const msiName$6 = "ManagedIdentityCredential - AppServiceMSI 2017"; -const logger$l = credentialLogger(msiName$6); +// Licensed under the MIT License. /** - * Generates the options used on the request for an access token. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. */ -function prepareRequestOptions$5(scopes, clientId) { - const resource = mapScopesToResource(scopes); - if (!resource) { - throw new Error(`${msiName$6}: Multiple scopes are not supported.`); - } - const queryParameters = { - resource, - "api-version": "2017-09-01", - }; - if (clientId) { - queryParameters.clientid = clientId; +class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); } - const query = new URLSearchParams(queryParameters); - // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. - if (!process.env.MSI_ENDPOINT) { - throw new Error(`${msiName$6}: Missing environment variable: MSI_ENDPOINT`); + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); } - if (!process.env.MSI_SECRET) { - throw new Error(`${msiName$6}: Missing environment variable: MSI_SECRET`); + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } - return { - url: `${process.env.MSI_ENDPOINT}?${query.toString()}`, - method: "GET", - headers: coreRestPipeline.createHttpHeaders({ - Accept: "application/json", - secret: process.env.MSI_SECRET, - }), - }; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Defines how to determine whether the Azure App Service MSI is available, and also how to retrieve a token from the Azure App Service MSI. + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). */ -const appServiceMsi2017 = { - name: "appServiceMsi2017", - async isAvailable({ scopes }) { - const resource = mapScopesToResource(scopes); - if (!resource) { - logger$l.info(`${msiName$6}: Unavailable. 
Multiple scopes are not supported.`); - return false; - } - const env = process.env; - const result = Boolean(env.MSI_ENDPOINT && env.MSI_SECRET); - if (!result) { - logger$l.info(`${msiName$6}: Unavailable. The environment variables needed are: MSI_ENDPOINT and MSI_SECRET.`); - } - return result; - }, - async getToken(configuration, getTokenOptions = {}) { - const { identityClient, scopes, clientId, resourceId } = configuration; - if (resourceId) { - logger$l.warning(`${msiName$6}: managed Identity by resource Id is not supported. Argument resourceId might be ignored by the service.`); - } - logger$l.info(`${msiName$6}: Using the endpoint and the secret coming form the environment variables: MSI_ENDPOINT=${process.env.MSI_ENDPOINT} and MSI_SECRET=[REDACTED].`); - const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$5(scopes, clientId)), { - // Generally, MSI endpoints use the HTTP protocol, without transport layer security (TLS). - allowInsecureConnection: true })); - const tokenResponse = await identityClient.sendTokenRequest(request); - return (tokenResponse && tokenResponse.accessToken) || null; - }, -}; +class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const msiName$5 = "ManagedIdentityCredential - CloudShellMSI"; -const logger$k = credentialLogger(msiName$5); +// Licensed under the MIT License. /** - * Generates the options used on the request for an access token. + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). */ -function prepareRequestOptions$4(scopes, clientId, resourceId) { - const resource = mapScopesToResource(scopes); - if (!resource) { - throw new Error(`${msiName$5}: Multiple scopes are not supported.`); - } - const body = { - resource, - }; - if (clientId) { - body.client_id = clientId; - } - if (resourceId) { - body.msi_res_id = resourceId; +class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); } - // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. - if (!process.env.MSI_ENDPOINT) { - throw new Error(`${msiName$5}: Missing environment variable: MSI_ENDPOINT`); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+let _defaultHttpClient; +function getCachedDefaultHttpClient() { + if (!_defaultHttpClient) { + _defaultHttpClient = coreRestPipeline.createDefaultHttpClient(); } - const params = new URLSearchParams(body); - return { - url: process.env.MSI_ENDPOINT, - method: "POST", - body: params.toString(), - headers: coreRestPipeline.createHttpHeaders({ - Accept: "application/json", - Metadata: "true", - "Content-Type": "application/x-www-form-urlencoded", - }), - }; + return _defaultHttpClient; } -/** - * Defines how to determine whether the Azure Cloud Shell MSI is available, and also how to retrieve a token from the Azure Cloud Shell MSI. - * Since Azure Managed Identities aren't available in the Azure Cloud Shell, we log a warning for users that try to access cloud shell using user assigned identity. - */ -const cloudShellMsi = { - name: "cloudShellMsi", - async isAvailable({ scopes }) { - const resource = mapScopesToResource(scopes); - if (!resource) { - logger$k.info(`${msiName$5}: Unavailable. Multiple scopes are not supported.`); - return false; - } - const result = Boolean(process.env.MSI_ENDPOINT); - if (!result) { - logger$k.info(`${msiName$5}: Unavailable. The environment variable MSI_ENDPOINT is needed.`); - } - return result; - }, - async getToken(configuration, getTokenOptions = {}) { - const { identityClient, scopes, clientId, resourceId } = configuration; - if (clientId) { - logger$k.warning(`${msiName$5}: user-assigned identities not supported. The argument clientId might be ignored by the service.`); - } - if (resourceId) { - logger$k.warning(`${msiName$5}: user defined managed Identity by resource Id not supported. The argument resourceId might be ignored by the service.`); - } - logger$k.info(`${msiName$5}: Using the endpoint coming form the environment variable MSI_ENDPOINT = ${process.env.MSI_ENDPOINT}.`); - const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$4(scopes, clientId, resourceId)), { - // Generally, MSI endpoints use the HTTP protocol, without transport layer security (TLS). - allowInsecureConnection: true })); - const tokenResponse = await identityClient.sendTokenRequest(request); - return (tokenResponse && tokenResponse.accessToken) || null; - }, -}; // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const msiName$4 = "ManagedIdentityCredential - IMDS"; -const logger$j = credentialLogger(msiName$4); +// Licensed under the MIT License. /** - * Generates the options used on the request for an access token. + * The programmatic identifier of the StorageBrowserPolicy. */ -function prepareRequestOptions$3(scopes, clientId, resourceId, options) { - var _a; - const resource = mapScopesToResource(scopes); - if (!resource) { - throw new Error(`${msiName$4}: Multiple scopes are not supported.`); - } - const { skipQuery, skipMetadataHeader } = options || {}; - let query = ""; - // Pod Identity will try to process this request even if the Metadata header is missing. - // We can exclude the request query to ensure no IMDS endpoint tries to process the ping request. 
- if (!skipQuery) { - const queryParameters = { - resource, - "api-version": imdsApiVersion, - }; - if (clientId) { - queryParameters.client_id = clientId; - } - if (resourceId) { - queryParameters.msi_res_id = resourceId; - } - const params = new URLSearchParams(queryParameters); - query = `?${params.toString()}`; - } - const url = new URL(imdsEndpointPath, (_a = process.env.AZURE_POD_IDENTITY_AUTHORITY_HOST) !== null && _a !== void 0 ? _a : imdsHost); - const rawHeaders = { - Accept: "application/json", - Metadata: "true", - }; - // Remove the Metadata header to invoke a request error from some IMDS endpoints. - if (skipMetadataHeader) { - delete rawHeaders.Metadata; - } +const storageBrowserPolicyName = "storageBrowserPolicy"; +/** + * storageBrowserPolicy is a policy used to prevent browsers from caching requests + * and to remove cookies and explicit content-length headers. + */ +function storageBrowserPolicy() { return { - // In this case, the `?` should be added in the "query" variable `skipQuery` is not set. - url: `${url}${query}`, - method: "GET", - headers: coreRestPipeline.createHttpHeaders(rawHeaders), + name: storageBrowserPolicyName, + async sendRequest(request, next) { + if (coreUtil.isNode) { + return next(request); + } + if (request.method === "GET" || request.method === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.delete(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.delete(HeaderConstants.CONTENT_LENGTH); + return next(request); + }, }; } -// 800ms -> 1600ms -> 3200ms -const imdsMsiRetryConfig = { - maxRetries: 3, - startDelayInMs: 800, - intervalIncrement: 2, + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Name of the {@link storageRetryPolicy} + */ +const storageRetryPolicyName = "storageRetryPolicy"; +/** + * RetryPolicy types. + */ +var StorageRetryPolicyType; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(StorageRetryPolicyType || (StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy }; +const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", +]; +const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** - * Defines how to determine whether the Azure IMDS MSI is available, and also how to retrieve a token from the Azure IMDS MSI. + * Retry policy with exponential retry and linear retry implemented. */ -const imdsMsi = { - name: "imdsMsi", - async isAvailable({ scopes, identityClient, clientId, resourceId, getTokenOptions = {}, }) { - const resource = mapScopesToResource(scopes); - if (!resource) { - logger$j.info(`${msiName$4}: Unavailable. 
Multiple scopes are not supported.`); +function storageRetryPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f; + const retryPolicyType = (_a = options.retryPolicyType) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? _c : DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error, }) { + var _a, _b; + if (attempt >= maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); return false; } - // if the PodIdentityEndpoint environment variable was set no need to probe the endpoint, it can be assumed to exist - if (process.env.AZURE_POD_IDENTITY_AUTHORITY_HOST) { - return true; - } - if (!identityClient) { - throw new Error("Missing IdentityClient"); + if (error) { + for (const retriableError of retriableErrors) { + if (error.name.toUpperCase().includes(retriableError) || + error.message.toUpperCase().includes(retriableError) || + (error.code && error.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + if ((error === null || error === void 0 ? void 0 : error.code) === "PARSE_ERROR" && + (error === null || error === void 0 ? void 0 : error.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } } - const requestOptions = prepareRequestOptions$3(resource, clientId, resourceId, { - skipMetadataHeader: true, - skipQuery: true, - }); - return tracingClient.withSpan("ManagedIdentityCredential-pingImdsEndpoint", getTokenOptions, async (options) => { - var _a, _b; - requestOptions.tracingOptions = options.tracingOptions; - // Create a request with a timeout since we expect that - // not having a "Metadata" header should cause an error to be - // returned quickly from the endpoint, proving its availability. - const request = coreRestPipeline.createPipelineRequest(requestOptions); - // Default to 1000 if the default of 0 is used. - // Negative values can still be used to disable the timeout. - request.timeout = ((_a = options.requestOptions) === null || _a === void 0 ? void 0 : _a.timeout) || 1000; - // This MSI uses the imdsEndpoint to get the token, which only uses http:// - request.allowInsecureConnection = true; - let response; - try { - logger$j.info(`${msiName$4}: Pinging the Azure IMDS endpoint`); - response = await identityClient.sendRequest(request); + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || error) { + const statusCode = (_b = (_a = response === null || response === void 0 ? void 0 : response.status) !== null && _a !== void 0 ? _a : error === null || error === void 0 ? 
void 0 : error.statusCode) !== null && _b !== void 0 ? _b : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; } - catch (err) { - // If the request failed, or Node.js was unable to establish a connection, - // or the host was down, we'll assume the IMDS endpoint isn't available. - if (coreUtil.isError(err)) { - logger$j.verbose(`${msiName$4}: Caught error ${err.name}: ${err.message}`); - } - // This is a special case for Docker Desktop which responds with a 403 with a message that contains "A socket operation was attempted to an unreachable network" - // rather than just timing out, as expected. - logger$j.info(`${msiName$4}: The Azure IMDS endpoint is unavailable`); - return false; + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; } - if (response.status === 403) { - if ((_b = response.bodyAsText) === null || _b === void 0 ? void 0 : _b.includes("A socket operation was attempted to an unreachable network")) { - logger$j.info(`${msiName$4}: The Azure IMDS endpoint is unavailable`); - logger$j.info(`${msiName$4}: ${response.bodyAsText}`); - return false; - } + } + // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now. + // if (response) { + // // Retry select Copy Source Error Codes. + // if (response?.status >= 400) { + // const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode); + // if (copySourceError !== undefined) { + // switch (copySourceError) { + // case "InternalError": + // case "OperationTimedOut": + // case "ServerBusy": + // return true; + // } + // } + // } + // } + return false; + } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; } - // If we received any response, the endpoint is available - logger$j.info(`${msiName$4}: The Azure IMDS endpoint is available`); - return true; - }); - }, - async getToken(configuration, getTokenOptions = {}) { - const { identityClient, scopes, clientId, resourceId } = configuration; - if (process.env.AZURE_POD_IDENTITY_AUTHORITY_HOST) { - logger$j.info(`${msiName$4}: Using the Azure IMDS endpoint coming from the environment variable AZURE_POD_IDENTITY_AUTHORITY_HOST=${process.env.AZURE_POD_IDENTITY_AUTHORITY_HOST}.`); } else { - logger$j.info(`${msiName$4}: Using the default Azure IMDS endpoint ${imdsHost}.`); + delayTimeInMs = Math.random() * 1000; } - let nextDelayInMs = imdsMsiRetryConfig.startDelayInMs; - for (let retries = 0; retries < imdsMsiRetryConfig.maxRetries; retries++) { - try { - const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$3(scopes, clientId, resourceId)), { allowInsecureConnection: true })); - const tokenResponse = await identityClient.sendTokenRequest(request); - return (tokenResponse && tokenResponse.accessToken) || null; - } - catch (error) { - if (error.statusCode === 404) { - await coreUtil.delay(nextDelayInMs); - nextDelayInMs *= imdsMsiRetryConfig.intervalIncrement; - continue; + logger.info(`RetryPolicy: Delay for 
${delayTimeInMs}ms`); + return delayTimeInMs; + } + return { + name: storageRetryPolicyName, + async sendRequest(request, next) { + // Set the server-side timeout query parameter "timeout=[seconds]" + if (tryTimeoutInMs) { + request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1000))); + } + const primaryUrl = request.url; + const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : undefined; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || + !secondaryUrl || + !["GET", "HEAD", "OPTIONS"].includes(request.method) || + attempt % 2 === 1; + request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = undefined; + error = undefined; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await next(request); + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); } - throw error; + catch (e) { + if (coreRestPipeline.isRestError(e)) { + logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error = e; + } + else { + logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`); + throw e; + } + } + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error }); + if (retryAgain) { + await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); + } + attempt++; } - } - throw new AuthenticationError(404, `${msiName$4}: Failed to retrieve IMDS token after ${imdsMsiRetryConfig.maxRetries} retries.`); - }, -}; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const msiName$3 = "ManagedIdentityCredential - Azure Arc MSI"; -const logger$i = credentialLogger(msiName$3); -/** - * Generates the options used on the request for an access token. - */ -function prepareRequestOptions$2(scopes, clientId, resourceId) { - const resource = mapScopesToResource(scopes); - if (!resource) { - throw new Error(`${msiName$3}: Multiple scopes are not supported.`); - } - const queryParameters = { - resource, - "api-version": azureArcAPIVersion, + if (response) { + return response; + } + throw error !== null && error !== void 0 ? error : new coreRestPipeline.RestError("RetryPolicy failed without known error."); + }, }; - if (clientId) { - queryParameters.client_id = clientId; - } - if (resourceId) { - queryParameters.msi_res_id = resourceId; - } - // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. - if (!process.env.IDENTITY_ENDPOINT) { - throw new Error(`${msiName$3}: Missing environment variable: IDENTITY_ENDPOINT`); - } - const query = new URLSearchParams(queryParameters); - return coreRestPipeline.createPipelineRequest({ - // Should be similar to: http://localhost:40342/metadata/identity/oauth2/token - url: `${process.env.IDENTITY_ENDPOINT}?${query.toString()}`, - method: "GET", - headers: coreRestPipeline.createHttpHeaders({ - Accept: "application/json", - Metadata: "true", - }), - }); } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Retrieves the file contents at the given path using promises. - * Useful since `fs`'s readFileSync locks the thread, and to avoid extra dependencies. + * The programmatic identifier of the storageSharedKeyCredentialPolicy. 
*/ -function readFileAsync$1(path, options) { - return new Promise((resolve, reject) => fs.readFile(path, options, (err, data) => { - if (err) { - reject(err); - } - resolve(data); - })); -} +const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; /** - * Does a request to the authentication provider that results in a file path. + * storageSharedKeyCredentialPolicy handles signing requests using storage account keys. */ -async function filePathRequest(identityClient, requestPrepareOptions) { - const response = await identityClient.sendRequest(coreRestPipeline.createPipelineRequest(requestPrepareOptions)); - if (response.status !== 401) { - let message = ""; - if (response.bodyAsText) { - message = ` Response: ${response.bodyAsText}`; +function storageSharedKeyCredentialPolicy(options) { + function signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || Buffer.isBuffer(request.body)) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } - throw new AuthenticationError(response.status, `${msiName$3}: To authenticate with Azure Arc MSI, status code 401 is expected on the first request. ${message}`); - } - const authHeader = response.headers.get("www-authenticate") || ""; - try { - return authHeader.split("=").slice(1)[0]; - } - catch (e) { - throw Error(`Invalid www-authenticate header format: ${authHeader}`); + const stringToSign = [ + request.method.toUpperCase(), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, HeaderConstants.DATE), + getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + getCanonicalizedHeadersString(request) + + getCanonicalizedResourceString(request); + const signature = crypto.createHmac("sha256", options.accountKey) + .update(stringToSign, "utf8") + .digest("base64"); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); } -} -/** - * Defines how to determine whether the Azure Arc MSI is available, and also how to retrieve a token from the Azure Arc MSI. - */ -const arcMsi = { - name: "arc", - async isAvailable({ scopes }) { - const resource = mapScopesToResource(scopes); - if (!resource) { - logger$i.info(`${msiName$3}: Unavailable. 
Multiple scopes are not supported.`); - return false; - } - const result = Boolean(process.env.IMDS_ENDPOINT && process.env.IDENTITY_ENDPOINT); - if (!result) { - logger$i.info(`${msiName$3}: The environment variables needed are: IMDS_ENDPOINT and IDENTITY_ENDPOINT`); - } - return result; - }, - async getToken(configuration, getTokenOptions = {}) { - var _a; - const { identityClient, scopes, clientId, resourceId } = configuration; - if (clientId) { - logger$i.warning(`${msiName$3}: user-assigned identities not supported. The argument clientId might be ignored by the service.`); - } - if (resourceId) { - logger$i.warning(`${msiName$3}: user defined managed Identity by resource Id is not supported. Argument resourceId will be ignored.`); + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + */ + function getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; } - logger$i.info(`${msiName$3}: Authenticating.`); - const requestOptions = Object.assign(Object.assign({ disableJsonStringifyOnBody: true, deserializationMapper: undefined, abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$2(scopes, clientId, resourceId)), { allowInsecureConnection: true }); - const filePath = await filePathRequest(identityClient, requestOptions); - if (!filePath) { - throw new Error(`${msiName$3}: Failed to find the token file.`); + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; } - const key = await readFileAsync$1(filePath, { encoding: "utf-8" }); - (_a = requestOptions.headers) === null || _a === void 0 ? void 0 : _a.set("Authorization", `Basic ${key}`); - const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({}, requestOptions), { - // Generally, MSI endpoints use the HTTP protocol, without transport layer security (TLS). - allowInsecureConnection: true })); - const tokenResponse = await identityClient.sendTokenRequest(request); - return (tokenResponse && tokenResponse.accessToken) || null; - }, -}; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * MSAL client assertion client. Calls to MSAL's confidential application's `acquireTokenByClientCredential` during `doGetToken`. - * @internal - */ -class MsalClientAssertion extends MsalNode { - constructor(options) { - super(options); - this.requiresConfidential = true; - this.getAssertion = options.getAssertion; + return value; } - async doGetToken(scopes, options = {}) { - try { - const assertion = await this.getAssertion(); - const result = await this.getApp("confidential", options.enableCae).acquireTokenByClientCredential({ - scopes, - correlationId: options.correlationId, - azureRegion: this.azureRegion, - authority: options.authority, - claims: options.claims, - clientAssertion: assertion, - }); - // The Client Credential flow does not return an account, - // so each time getToken gets called, we will have to acquire a new token through the service. - return this.handleResult(scopes, this.clientId, result || undefined); + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. 
Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + */ + function getCanonicalizedHeadersString(request) { + let headersArray = []; + for (const [name, value] of request.headers) { + if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); + } } - catch (err) { - let err2 = err; - if (err === null || err === undefined) { - err2 = new Error(JSON.stringify(err)); + headersArray.sort((a, b) => { + return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; } - else { - err2 = coreUtil.isError(err) ? err : new Error(String(err)); + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + function getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; } - throw this.handleError(scopes, err2, options); } + return canonicalizedResourceString; } + return { + name: storageSharedKeyCredentialPolicyName, + async sendRequest(request, next) { + signRequest(request); + return next(request); + }, + }; } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const logger$h = credentialLogger("ClientAssertionCredential"); +// Licensed under the MIT License. /** - * Authenticates a service principal with a JWT assertion. + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. 
Remove content-length header to avoid browsers warning */ -class ClientAssertionCredential { +class StorageBrowserPolicy extends BaseRequestPolicy { /** - * Creates an instance of the ClientAssertionCredential with the details - * needed to authenticate against Microsoft Entra ID with a client - * assertion provided by the developer through the `getAssertion` function parameter. - * - * @param tenantId - The Microsoft Entra tenant (directory) ID. - * @param clientId - The client (application) ID of an App Registration in the tenant. - * @param getAssertion - A function that retrieves the assertion for the credential to use. - * @param options - Options for configuring the client which makes the authentication request. + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - */ - constructor(tenantId, clientId, getAssertion, options = {}) { - if (!tenantId || !clientId || !getAssertion) { - throw new Error("ClientAssertionCredential: tenantId, clientId, and clientAssertion are required parameters."); - } - this.tenantId = tenantId; - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - this.clientId = clientId; - this.options = options; - this.msalFlow = new MsalClientAssertion(Object.assign(Object.assign({}, options), { logger: logger$h, clientId: this.clientId, tenantId: this.tenantId, tokenCredentialOptions: this.options, getAssertion })); + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); } /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * Sends out request. * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. + * @param request - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { - newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$h); - const arrayScopes = Array.isArray(scopes) ? scopes : [scopes]; - return this.msalFlow.getToken(arrayScopes, newOptions); - }); + async sendRequest(request) { + if (coreUtil.isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const credentialName$3 = "WorkloadIdentityCredential"; -/** - * Contains the list of all supported environment variable names so that an - * appropriate error message can be generated when no credentials can be - * configured. 
- * - * @internal - */ -const SupportedWorkloadEnvironmentVariables = [ - "AZURE_TENANT_ID", - "AZURE_CLIENT_ID", - "AZURE_FEDERATED_TOKEN_FILE", -]; -const logger$g = credentialLogger(credentialName$3); +// Licensed under the MIT License. /** - * Workload Identity authentication is a feature in Azure that allows applications running on virtual machines (VMs) - * to access other Azure resources without the need for a service principal or managed identity. With Workload Identity - * authentication, applications authenticate themselves using their own identity, rather than using a shared service - * principal or managed identity. Under the hood, Workload Identity authentication uses the concept of Service Account - * Credentials (SACs), which are automatically created by Azure and stored securely in the VM. By using Workload - * Identity authentication, you can avoid the need to manage and rotate service principals or managed identities for - * each application on each VM. Additionally, because SACs are created automatically and managed by Azure, you don't - * need to worry about storing and securing sensitive credentials themselves. - * The WorkloadIdentityCredential supports Microsoft Entra Workload ID authentication on Azure Kubernetes and acquires - * a token using the SACs available in the Azure Kubernetes environment. - * Refer to Microsoft Entra - * Workload ID for more information. + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. */ -class WorkloadIdentityCredential { - /** - * WorkloadIdentityCredential supports Microsoft Entra Workload ID on Kubernetes. - * - * @param options - The identity client options to use for authentication. - */ - constructor(options) { - this.azureFederatedTokenFileContent = undefined; - this.cacheDate = undefined; - // Logging environment variables for error details - const assignedEnv = processEnvVars(SupportedWorkloadEnvironmentVariables).assigned.join(", "); - logger$g.info(`Found the following environment variables: ${assignedEnv}`); - const workloadIdentityCredentialOptions = options !== null && options !== void 0 ? options : {}; - const tenantId = workloadIdentityCredentialOptions.tenantId || process.env.AZURE_TENANT_ID; - const clientId = workloadIdentityCredentialOptions.clientId || process.env.AZURE_CLIENT_ID; - this.federatedTokenFilePath = - workloadIdentityCredentialOptions.tokenFilePath || process.env.AZURE_FEDERATED_TOKEN_FILE; - if (tenantId) { - checkTenantId(logger$g, tenantId); - } - if (clientId && tenantId && this.federatedTokenFilePath) { - logger$g.info(`Invoking ClientAssertionCredential with tenant ID: ${tenantId}, clientId: ${workloadIdentityCredentialOptions.clientId} and federated token path: [REDACTED]`); - this.client = new ClientAssertionCredential(tenantId, clientId, this.readFileContents.bind(this), options); - } - } +class StorageBrowserPolicyFactory { /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. + * Creates a StorageBrowserPolicyFactory object. * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. + * @param nextPolicy - + * @param options - */ - async getToken(scopes, options) { - if (!this.client) { - const errorMessage = `${credentialName$3}: is unavailable. 
tenantId, clientId, and federatedTokenFilePath are required parameters. - In DefaultAzureCredential and ManagedIdentityCredential, these can be provided as environment variables - - "AZURE_TENANT_ID", - "AZURE_CLIENT_ID", - "AZURE_FEDERATED_TOKEN_FILE". See the troubleshooting guide for more information: https://aka.ms/azsdk/js/identity/workloadidentitycredential/troubleshoot `; - logger$g.info(errorMessage); - throw new CredentialUnavailableError(errorMessage); - } - logger$g.info("Invoking getToken() of Client Assertion Credential"); - return this.client.getToken(scopes, options); - } - async readFileContents() { - // Cached assertions expire after 5 minutes - if (this.cacheDate !== undefined && Date.now() - this.cacheDate >= 1000 * 60 * 5) { - this.azureFederatedTokenFileContent = undefined; - } - if (!this.federatedTokenFilePath) { - throw new CredentialUnavailableError(`${credentialName$3}: is unavailable. Invalid file path provided ${this.federatedTokenFilePath}.`); - } - if (!this.azureFederatedTokenFileContent) { - const file = await promises.readFile(this.federatedTokenFilePath, "utf8"); - const value = file.trim(); - if (!value) { - throw new CredentialUnavailableError(`${credentialName$3}: is unavailable. No content on the file ${this.federatedTokenFilePath}.`); - } - else { - this.azureFederatedTokenFileContent = value; - this.cacheDate = Date.now(); - } - } - return this.azureFederatedTokenFileContent; + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const msiName$2 = "ManagedIdentityCredential - Token Exchange"; -const logger$f = credentialLogger(msiName$2); +// Licensed under the MIT License. /** - * Defines how to determine whether the token exchange MSI is available, and also how to retrieve a token from the token exchange MSI. + * The programmatic identifier of the storageCorrectContentLengthPolicy. */ -function tokenExchangeMsi() { +const storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; +/** + * storageCorrectContentLengthPolicy to correctly set Content-Length header with request body length. + */ +function storageCorrectContentLengthPolicy() { + function correctContentLength(request) { + if (request.body && + (typeof request.body === "string" || Buffer.isBuffer(request.body)) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + } return { - name: "tokenExchangeMsi", - async isAvailable({ clientId }) { - const env = process.env; - const result = Boolean((clientId || env.AZURE_CLIENT_ID) && - env.AZURE_TENANT_ID && - process.env.AZURE_FEDERATED_TOKEN_FILE); - if (!result) { - logger$f.info(`${msiName$2}: Unavailable. 
The environment variables needed are: AZURE_CLIENT_ID (or the client ID sent through the parameters), AZURE_TENANT_ID and AZURE_FEDERATED_TOKEN_FILE`); - } - return result; - }, - async getToken(configuration, getTokenOptions = {}) { - const { scopes, clientId } = configuration; - const identityClientTokenCredentialOptions = {}; - const workloadIdentityCredential = new WorkloadIdentityCredential(Object.assign(Object.assign({ clientId, tenantId: process.env.AZURE_TENANT_ID, tokenFilePath: process.env.AZURE_FEDERATED_TOKEN_FILE }, identityClientTokenCredentialOptions), { disableInstanceDiscovery: true })); - const token = await workloadIdentityCredential.getToken(scopes, getTokenOptions); - return token; + name: storageCorrectContentLengthPolicyName, + async sendRequest(request, next) { + correctContentLength(request); + return next(request); }, }; } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// This MSI can be easily tested by deploying a container to Azure Service Fabric with the Dockerfile: -// -// FROM node:12 -// RUN wget https://host.any/path/bash.sh -// CMD ["bash", "bash.sh"] -// -// Where the bash script contains: -// -// curl --insecure $IDENTITY_ENDPOINT'?api-version=2019-07-01-preview&resource=https://vault.azure.net/' -H "Secret: $IDENTITY_HEADER" -// -const msiName$1 = "ManagedIdentityCredential - Fabric MSI"; -const logger$e = credentialLogger(msiName$1); +// Licensed under the MIT License. /** - * Generates the options used on the request for an access token. + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract */ -function prepareRequestOptions$1(scopes, clientId, resourceId) { - const resource = mapScopesToResource(scopes); - if (!resource) { - throw new Error(`${msiName$1}: Multiple scopes are not supported.`); - } - const queryParameters = { - resource, - "api-version": azureFabricVersion, - }; - if (clientId) { - queryParameters.client_id = clientId; - } - if (resourceId) { - queryParameters.msi_res_id = resourceId; - } - const query = new URLSearchParams(queryParameters); - // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. - if (!process.env.IDENTITY_ENDPOINT) { - throw new Error("Missing environment variable: IDENTITY_ENDPOINT"); - } - if (!process.env.IDENTITY_HEADER) { - throw new Error("Missing environment variable: IDENTITY_HEADER"); +function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; } - return { - url: `${process.env.IDENTITY_ENDPOINT}?${query.toString()}`, - method: "GET", - headers: coreRestPipeline.createHttpHeaders({ - Accept: "application/json", - secret: process.env.IDENTITY_HEADER, - }), - }; + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); } /** - * Defines how to determine whether the Azure Service Fabric MSI is available, and also how to retrieve a token from the Azure Service Fabric MSI. + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. 
+ * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. */ -const fabricMsi = { - name: "fabricMsi", - async isAvailable({ scopes }) { - const resource = mapScopesToResource(scopes); - if (!resource) { - logger$e.info(`${msiName$1}: Unavailable. Multiple scopes are not supported.`); - return false; - } - const env = process.env; - const result = Boolean(env.IDENTITY_ENDPOINT && env.IDENTITY_HEADER && env.IDENTITY_SERVER_THUMBPRINT); - if (!result) { - logger$e.info(`${msiName$1}: Unavailable. The environment variables needed are: IDENTITY_ENDPOINT, IDENTITY_HEADER and IDENTITY_SERVER_THUMBPRINT`); - } - return result; - }, - async getToken(configuration, getTokenOptions = {}) { - const { scopes, identityClient, clientId, resourceId } = configuration; - if (resourceId) { - logger$e.warning(`${msiName$1}: user defined managed Identity by resource Id is not supported. Argument resourceId might be ignored by the service.`); - } - logger$e.info([ - `${msiName$1}:`, - "Using the endpoint and the secret coming from the environment variables:", - `IDENTITY_ENDPOINT=${process.env.IDENTITY_ENDPOINT},`, - "IDENTITY_HEADER=[REDACTED] and", - "IDENTITY_SERVER_THUMBPRINT=[REDACTED].", - ].join(" ")); - const request = coreRestPipeline.createPipelineRequest(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions$1(scopes, clientId, resourceId))); - request.agent = new https.Agent({ - // This is necessary because Service Fabric provides a self-signed certificate. - // The alternative path is to verify the certificate using the IDENTITY_SERVER_THUMBPRINT env variable. - rejectUnauthorized: false, - }); - const tokenResponse = await identityClient.sendTokenRequest(request); - return (tokenResponse && tokenResponse.accessToken) || null; - }, -}; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const msiName = "ManagedIdentityCredential - AppServiceMSI 2019"; -const logger$d = credentialLogger(msiName); -/** - * Generates the options used on the request for an access token. - */ -function prepareRequestOptions(scopes, clientId, resourceId) { - const resource = mapScopesToResource(scopes); - if (!resource) { - throw new Error(`${msiName}: Multiple scopes are not supported.`); - } - const queryParameters = { - resource, - "api-version": "2019-08-01", - }; - if (clientId) { - queryParameters.client_id = clientId; - } - if (resourceId) { - queryParameters.mi_res_id = resourceId; - } - const query = new URLSearchParams(queryParameters); - // This error should not bubble up, since we verify that this environment variable is defined in the isAvailable() method defined below. - if (!process.env.IDENTITY_ENDPOINT) { - throw new Error(`${msiName}: Missing environment variable: IDENTITY_ENDPOINT`); - } - if (!process.env.IDENTITY_HEADER) { - throw new Error(`${msiName}: Missing environment variable: IDENTITY_HEADER`); - } - return { - url: `${process.env.IDENTITY_ENDPOINT}?${query.toString()}`, - method: "GET", - headers: coreRestPipeline.createHttpHeaders({ - Accept: "application/json", - "X-IDENTITY-HEADER": process.env.IDENTITY_HEADER, - }), - }; -} -/** - * Defines how to determine whether the Azure App Service MSI is available, and also how to retrieve a token from the Azure App Service MSI. - */ -const appServiceMsi2019 = { - name: "appServiceMsi2019", - async isAvailable({ scopes }) { - const resource = mapScopesToResource(scopes); - if (!resource) { - logger$d.info(`${msiName}: Unavailable. 
Multiple scopes are not supported.`); - return false; - } - const env = process.env; - const result = Boolean(env.IDENTITY_ENDPOINT && env.IDENTITY_HEADER); - if (!result) { - logger$d.info(`${msiName}: Unavailable. The environment variables needed are: IDENTITY_ENDPOINT and IDENTITY_HEADER.`); - } - return result; - }, - async getToken(configuration, getTokenOptions = {}) { - const { identityClient, scopes, clientId, resourceId } = configuration; - logger$d.info(`${msiName}: Using the endpoint and the secret coming form the environment variables: IDENTITY_ENDPOINT=${process.env.IDENTITY_ENDPOINT} and IDENTITY_HEADER=[REDACTED].`); - const request = coreRestPipeline.createPipelineRequest(Object.assign(Object.assign({ abortSignal: getTokenOptions.abortSignal }, prepareRequestOptions(scopes, clientId, resourceId)), { - // Generally, MSI endpoints use the HTTP protocol, without transport layer security (TLS). - allowInsecureConnection: true })); - const tokenResponse = await identityClient.sendTokenRequest(request); - return (tokenResponse && tokenResponse.accessToken) || null; - }, -}; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const logger$c = credentialLogger("ManagedIdentityCredential"); -/** - * Attempts authentication using a managed identity available at the deployment environment. - * This authentication type works in Azure VMs, App Service instances, Azure Functions applications, - * Azure Kubernetes Services, Azure Service Fabric instances and inside of the Azure Cloud Shell. - * - * More information about configuring managed identities can be found here: - * https://learn.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/overview - */ -class ManagedIdentityCredential { - /** - * @internal - * @hidden - */ - constructor(clientIdOrOptions, options) { - var _a; - this.isEndpointUnavailable = null; - this.isAppTokenProviderInitialized = false; - let _options; - if (typeof clientIdOrOptions === "string") { - this.clientId = clientIdOrOptions; - _options = options; - } - else { - this.clientId = clientIdOrOptions === null || clientIdOrOptions === void 0 ? void 0 : clientIdOrOptions.clientId; - _options = clientIdOrOptions; - } - this.resourceId = _options === null || _options === void 0 ? void 0 : _options.resourceId; - // For JavaScript users. - if (this.clientId && this.resourceId) { - throw new Error(`${ManagedIdentityCredential.name} - Client Id and Resource Id can't be provided at the same time.`); - } - this.identityClient = new IdentityClient(_options); - this.isAvailableIdentityClient = new IdentityClient(Object.assign(Object.assign({}, _options), { retryOptions: { - maxRetries: 0, - } })); - /** authority host validation and metadata discovery to be skipped in managed identity - * since this wasn't done previously before adding token cache support - */ - this.confidentialApp = new msalCommon.ConfidentialClientApplication({ - auth: { - authority: "https://login.microsoftonline.com/managed_identity", - clientId: (_a = this.clientId) !== null && _a !== void 0 ? 
_a : DeveloperSignOnClientId, - clientSecret: "dummy-secret", - cloudDiscoveryMetadata: '{"tenant_discovery_endpoint":"https://login.microsoftonline.com/common/v2.0/.well-known/openid-configuration","api-version":"1.1","metadata":[{"preferred_network":"login.microsoftonline.com","preferred_cache":"login.windows.net","aliases":["login.microsoftonline.com","login.windows.net","login.microsoft.com","sts.windows.net"]},{"preferred_network":"login.partner.microsoftonline.cn","preferred_cache":"login.partner.microsoftonline.cn","aliases":["login.partner.microsoftonline.cn","login.chinacloudapi.cn"]},{"preferred_network":"login.microsoftonline.de","preferred_cache":"login.microsoftonline.de","aliases":["login.microsoftonline.de"]},{"preferred_network":"login.microsoftonline.us","preferred_cache":"login.microsoftonline.us","aliases":["login.microsoftonline.us","login.usgovcloudapi.net"]},{"preferred_network":"login-us.microsoftonline.com","preferred_cache":"login-us.microsoftonline.com","aliases":["login-us.microsoftonline.com"]}]}', - authorityMetadata: '{"token_endpoint":"https://login.microsoftonline.com/common/oauth2/v2.0/token","token_endpoint_auth_methods_supported":["client_secret_post","private_key_jwt","client_secret_basic"],"jwks_uri":"https://login.microsoftonline.com/common/discovery/v2.0/keys","response_modes_supported":["query","fragment","form_post"],"subject_types_supported":["pairwise"],"id_token_signing_alg_values_supported":["RS256"],"response_types_supported":["code","id_token","code id_token","id_token token"],"scopes_supported":["openid","profile","email","offline_access"],"issuer":"https://login.microsoftonline.com/{tenantid}/v2.0","request_uri_parameter_supported":false,"userinfo_endpoint":"https://graph.microsoft.com/oidc/userinfo","authorization_endpoint":"https://login.microsoftonline.com/common/oauth2/v2.0/authorize","device_authorization_endpoint":"https://login.microsoftonline.com/common/oauth2/v2.0/devicecode","http_logout_supported":true,"frontchannel_logout_supported":true,"end_session_endpoint":"https://login.microsoftonline.com/common/oauth2/v2.0/logout","claims_supported":["sub","iss","cloud_instance_name","cloud_instance_host_name","cloud_graph_host_name","msgraph_host","aud","exp","iat","auth_time","acr","nonce","preferred_username","name","tid","ver","at_hash","c_hash","email"],"kerberos_endpoint":"https://login.microsoftonline.com/common/kerberos","tenant_region_scope":null,"cloud_instance_name":"microsoftonline.com","cloud_graph_host_name":"graph.windows.net","msgraph_host":"graph.microsoft.com","rbac_url":"https://pas.windows.net"}', - clientCapabilities: [], - }, - system: { - loggerOptions: { - logLevel: getMSALLogLevel(logger$o.getLogLevel()), - }, - }, - }); - } - async cachedAvailableMSI(scopes, getTokenOptions) { - if (this.cachedMSI) { - return this.cachedMSI; - } - const MSIs = [ - arcMsi, - fabricMsi, - appServiceMsi2019, - appServiceMsi2017, - cloudShellMsi, - tokenExchangeMsi(), - imdsMsi, - ]; - for (const msi of MSIs) { - if (await msi.isAvailable({ - scopes, - identityClient: this.isAvailableIdentityClient, - clientId: this.clientId, - resourceId: this.resourceId, - getTokenOptions, - })) { - this.cachedMSI = msi; - return msi; - } - } - throw new CredentialUnavailableError(`${ManagedIdentityCredential.name} - No MSI credential available`); - } - async authenticateManagedIdentity(scopes, getTokenOptions) { - const { span, updatedOptions } = tracingClient.startSpan(`${ManagedIdentityCredential.name}.authenticateManagedIdentity`, 
getTokenOptions); - try { - // Determining the available MSI, and avoiding checking for other MSIs while the program is running. - const availableMSI = await this.cachedAvailableMSI(scopes, updatedOptions); - return availableMSI.getToken({ - identityClient: this.identityClient, - scopes, - clientId: this.clientId, - resourceId: this.resourceId, - }, updatedOptions); - } - catch (err) { - span.setStatus({ - status: "error", - error: err, - }); - throw err; - } - finally { - span.end(); - } - } +class Pipeline { /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * If an unexpected error occurs, an {@link AuthenticationError} will be thrown with the details of the failure. + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. + * @param factories - + * @param options - */ - async getToken(scopes, options) { - let result = null; - const { span, updatedOptions } = tracingClient.startSpan(`${ManagedIdentityCredential.name}.getToken`, options); - try { - // isEndpointAvailable can be true, false, or null, - // If it's null, it means we don't yet know whether - // the endpoint is available and need to check for it. - if (this.isEndpointUnavailable !== true) { - const availableMSI = await this.cachedAvailableMSI(scopes, updatedOptions); - if (availableMSI.name === "tokenExchangeMsi") { - result = await this.authenticateManagedIdentity(scopes, updatedOptions); - } - else { - const appTokenParameters = { - correlationId: this.identityClient.getCorrelationId(), - tenantId: (options === null || options === void 0 ? void 0 : options.tenantId) || "managed_identity", - scopes: Array.isArray(scopes) ? scopes : [scopes], - claims: options === null || options === void 0 ? void 0 : options.claims, - }; - // Added a check to see if SetAppTokenProvider was already defined. - this.initializeSetAppTokenProvider(); - const authenticationResult = await this.confidentialApp.acquireTokenByClientCredential(Object.assign({}, appTokenParameters)); - result = this.handleResult(scopes, authenticationResult || undefined); - } - if (result === null) { - // If authenticateManagedIdentity returns null, - // it means no MSI endpoints are available. - // If so, we avoid trying to reach to them in future requests. - this.isEndpointUnavailable = true; - // It also means that the endpoint answered with either 200 or 201 (see the sendTokenRequest method), - // yet we had no access token. For this reason, we'll throw once with a specific message: - const error = new CredentialUnavailableError("The managed identity endpoint was reached, yet no tokens were received."); - logger$c.getToken.info(formatError(scopes, error)); - throw error; - } - // Since `authenticateManagedIdentity` didn't throw, and the result was not null, - // We will assume that this endpoint is reachable from this point forward, - // and avoid pinging again to it. - this.isEndpointUnavailable = false; - } - else { - // We've previously determined that the endpoint was unavailable, - // either because it was unreachable or permanently unable to authenticate. 
- const error = new CredentialUnavailableError("The managed identity endpoint is not currently available"); - logger$c.getToken.info(formatError(scopes, error)); - throw error; - } - logger$c.getToken.info(formatSuccess(scopes)); - return result; - } - catch (err) { - // CredentialUnavailable errors are expected to reach here. - // We intend them to bubble up, so that DefaultAzureCredential can catch them. - if (err.name === "AuthenticationRequiredError") { - throw err; - } - // Expected errors to reach this point: - // - Errors coming from a method unexpectedly breaking. - // - When identityClient.sendTokenRequest throws, in which case - // if the status code was 400, it means that the endpoint is working, - // but no identity is available. - span.setStatus({ - status: "error", - error: err, - }); - // If either the network is unreachable, - // we can safely assume the credential is unavailable. - if (err.code === "ENETUNREACH") { - const error = new CredentialUnavailableError(`${ManagedIdentityCredential.name}: Unavailable. Network unreachable. Message: ${err.message}`); - logger$c.getToken.info(formatError(scopes, error)); - throw error; - } - // If either the host was unreachable, - // we can safely assume the credential is unavailable. - if (err.code === "EHOSTUNREACH") { - const error = new CredentialUnavailableError(`${ManagedIdentityCredential.name}: Unavailable. No managed identity endpoint found. Message: ${err.message}`); - logger$c.getToken.info(formatError(scopes, error)); - throw error; - } - // If err.statusCode has a value of 400, it comes from sendTokenRequest, - // and it means that the endpoint is working, but that no identity is available. - if (err.statusCode === 400) { - throw new CredentialUnavailableError(`${ManagedIdentityCredential.name}: The managed identity endpoint is indicating there's no available identity. Message: ${err.message}`); - } - // This is a special case for Docker Desktop which responds with a 403 with a message that contains "A socket operation was attempted to an unreachable network" - // rather than just timing out, as expected. - if (err.statusCode === 403 || err.code === 403) { - if (err.message.includes("A socket operation was attempted to an unreachable network")) { - const error = new CredentialUnavailableError(`${ManagedIdentityCredential.name}: Unavailable. Network unreachable. Message: ${err.message}`); - logger$c.getToken.info(formatError(scopes, error)); - throw error; - } - } - // If the error has no status code, we can assume there was no available identity. - // This will throw silently during any ChainedTokenCredential. - if (err.statusCode === undefined) { - throw new CredentialUnavailableError(`${ManagedIdentityCredential.name}: Authentication failed. Message ${err.message}`); - } - // Any other error should break the chain. - throw new AuthenticationError(err.statusCode, { - error: `${ManagedIdentityCredential.name} authentication failed.`, - error_description: err.message, - }); - } - finally { - // Finally is always called, both if we return and if we throw in the above try/catch. - span.end(); - } + constructor(factories, options = {}) { + this.factories = factories; + this.options = options; } /** - * Handles the MSAL authentication result. - * If the result has an account, we update the local account reference. - * If the token received is invalid, an error will be thrown depending on what's missing. + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. 
+ * + * @returns The ServiceClientOptions object from this Pipeline. */ - handleResult(scopes, result, getTokenOptions) { - this.ensureValidMsalToken(scopes, result, getTokenOptions); - logger$c.getToken.info(formatSuccess(scopes)); + toServiceClientOptions() { return { - token: result.accessToken, - expiresOnTimestamp: result.expiresOn.getTime(), - }; - } - /** - * Ensures the validity of the MSAL token - * @internal - */ - ensureValidMsalToken(scopes, msalToken, getTokenOptions) { - const error = (message) => { - logger$c.getToken.info(message); - return new AuthenticationRequiredError({ - scopes: Array.isArray(scopes) ? scopes : [scopes], - getTokenOptions, - message, - }); + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, }; - if (!msalToken) { - throw error("No response"); - } - if (!msalToken.expiresOn) { - throw error(`Response had no "expiresOn" property.`); - } - if (!msalToken.accessToken) { - throw error(`Response had no "accessToken" property.`); - } - } - initializeSetAppTokenProvider() { - if (!this.isAppTokenProviderInitialized) { - this.confidentialApp.SetAppTokenProvider(async (appTokenProviderParameters) => { - logger$c.info(`SetAppTokenProvider invoked with parameters- ${JSON.stringify(appTokenProviderParameters)}`); - const getTokenOptions = Object.assign({}, appTokenProviderParameters); - logger$c.info(`authenticateManagedIdentity invoked with scopes- ${JSON.stringify(appTokenProviderParameters.scopes)} and getTokenOptions - ${JSON.stringify(getTokenOptions)}`); - const resultToken = await this.authenticateManagedIdentity(appTokenProviderParameters.scopes, getTokenOptions); - if (resultToken) { - logger$c.info(`SetAppTokenProvider will save the token in cache`); - const expiresInSeconds = (resultToken === null || resultToken === void 0 ? void 0 : resultToken.expiresOnTimestamp) - ? Math.floor((resultToken.expiresOnTimestamp - Date.now()) / 1000) - : 0; - return { - accessToken: resultToken === null || resultToken === void 0 ? void 0 : resultToken.token, - expiresInSeconds, - }; - } - else { - logger$c.info(`SetAppTokenProvider token has "no_access_token_returned" as the saved token`); - return { - accessToken: "no_access_token_returned", - expiresInSeconds: 0, - }; - } - }); - this.isAppTokenProviderInitialized = true; - } } } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Ensures the scopes value is an array. - * @internal - */ -function ensureScopes(scopes) { - return Array.isArray(scopes) ? scopes : [scopes]; -} /** - * Throws if the received scope is not valid. - * @internal + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. 
*/ -function ensureValidScopeForDevTimeCreds(scope, logger) { - if (!scope.match(/^[0-9a-zA-Z-_.:/]+$/)) { - const error = new Error("Invalid scope was specified by the user or calling client"); - logger.getToken.info(formatError(scope, error)); - throw error; +function newPipeline(credential, pipelineOptions = {}) { + if (!credential) { + credential = new AnonymousCredential(); } + const pipeline = new Pipeline([], pipelineOptions); + pipeline._credential = credential; + return pipeline; } -/** - * Returns the resource out of a scope. - * @internal - */ -function getScopeResource(scope) { - return scope.replace(/\/.default$/, ""); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Mockable reference to the CLI credential cliCredentialFunctions - * @internal - */ -const cliCredentialInternals = { - /** - * @internal - */ - getSafeWorkingDir() { - if (process.platform === "win32") { - if (!process.env.SystemRoot) { - throw new Error("Azure CLI credential expects a 'SystemRoot' environment variable"); - } - return process.env.SystemRoot; - } - else { - return "/bin"; - } - }, - /** - * Gets the access token from Azure CLI - * @param resource - The resource to use when getting the token - * @internal - */ - async getAzureCliAccessToken(resource, tenantId, timeout) { - let tenantSection = []; - if (tenantId) { - tenantSection = ["--tenant", tenantId]; - } - return new Promise((resolve, reject) => { - try { - child_process.execFile("az", [ - "account", - "get-access-token", - "--output", - "json", - "--resource", - resource, - ...tenantSection, - ], { cwd: cliCredentialInternals.getSafeWorkingDir(), shell: true, timeout }, (error, stdout, stderr) => { - resolve({ stdout: stdout, stderr: stderr, error }); - }); - } - catch (err) { - reject(err); - } +function processDownlevelPipeline(pipeline) { + const knownFactoryFunctions = [ + isAnonymousCredential, + isStorageSharedKeyCredential, + isCoreHttpBearerTokenFactory, + isStorageBrowserPolicyFactory, + isStorageRetryPolicyFactory, + isStorageTelemetryPolicyFactory, + isCoreHttpPolicyFactory, + ]; + if (pipeline.factories.length) { + const novelFactories = pipeline.factories.filter((factory) => { + return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); }); - }, -}; -const logger$b = credentialLogger("AzureCliCredential"); -/** - * This credential will use the currently logged-in user login information - * via the Azure CLI ('az') commandline tool. - * To do so, it will read the user access token and expire time - * with Azure CLI command "az account get-access-token". - */ -class AzureCliCredential { - /** - * Creates an instance of the {@link AzureCliCredential}. - * - * To use this credential, ensure that you have already logged - * in via the 'az' tool using the command "az login" from the commandline. - * - * @param options - Options, to optionally allow multi-tenant requests. - */ - constructor(options) { - if (options === null || options === void 0 ? void 0 : options.tenantId) { - checkTenantId(logger$b, options === null || options === void 0 ? void 0 : options.tenantId); - this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; - } - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - this.timeout = options === null || options === void 0 ? 
void 0 : options.processTimeoutInMs; - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async getToken(scopes, options = {}) { - const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds); - if (tenantId) { - checkTenantId(logger$b, tenantId); + if (novelFactories.length) { + const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); + // if there are any left over, wrap in a requestPolicyFactoryPolicy + return { + wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories), + afterRetry: hasInjector, + }; } - const scope = typeof scopes === "string" ? scopes : scopes[0]; - logger$b.getToken.info(`Using the scope ${scope}`); - return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async () => { - var _a, _b, _c, _d; - try { - ensureValidScopeForDevTimeCreds(scope, logger$b); - const resource = getScopeResource(scope); - const obj = await cliCredentialInternals.getAzureCliAccessToken(resource, tenantId, this.timeout); - const specificScope = (_a = obj.stderr) === null || _a === void 0 ? void 0 : _a.match("(.*)az login --scope(.*)"); - const isLoginError = ((_b = obj.stderr) === null || _b === void 0 ? void 0 : _b.match("(.*)az login(.*)")) && !specificScope; - const isNotInstallError = ((_c = obj.stderr) === null || _c === void 0 ? void 0 : _c.match("az:(.*)not found")) || ((_d = obj.stderr) === null || _d === void 0 ? void 0 : _d.startsWith("'az' is not recognized")); - if (isNotInstallError) { - const error = new CredentialUnavailableError("Azure CLI could not be found. Please visit https://aka.ms/azure-cli for installation instructions and then, once installed, authenticate to your Azure account using 'az login'."); - logger$b.getToken.info(formatError(scopes, error)); - throw error; - } - if (isLoginError) { - const error = new CredentialUnavailableError("Please run 'az login' from a command prompt to authenticate before using this credential."); - logger$b.getToken.info(formatError(scopes, error)); - throw error; - } - try { - const responseData = obj.stdout; - const response = JSON.parse(responseData); - logger$b.getToken.info(formatSuccess(scopes)); - const returnValue = { - token: response.accessToken, - expiresOnTimestamp: new Date(response.expiresOn).getTime(), - }; - return returnValue; - } - catch (e) { - if (obj.stderr) { - throw new CredentialUnavailableError(obj.stderr); - } - throw e; - } - } - catch (err) { - const error = err.name === "CredentialUnavailableError" - ? err - : new CredentialUnavailableError(err.message || "Unknown error while trying to retrieve the access token"); - logger$b.getToken.info(formatError(scopes, error)); - throw error; - } - }); } + return undefined; } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Easy to mock childProcess utils. 
- * @internal - */ -const processUtils = { - /** - * Promisifying childProcess.execFile - * @internal - */ - execFile(file, params, options) { - return new Promise((resolve, reject) => { - child_process__namespace.execFile(file, params, options, (error, stdout, stderr) => { - if (Buffer.isBuffer(stdout)) { - stdout = stdout.toString("utf8"); - } - if (Buffer.isBuffer(stderr)) { - stderr = stderr.toString("utf8"); - } - if (stderr || error) { - reject(stderr ? new Error(stderr) : error); - } - else { - resolve(stdout); - } - }); - }); - }, -}; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const logger$a = credentialLogger("AzurePowerShellCredential"); -const isWindows = process.platform === "win32"; -/** - * Returns a platform-appropriate command name by appending ".exe" on Windows. - * - * @internal - */ -function formatCommand(commandName) { - if (isWindows) { - return `${commandName}.exe`; - } - else { - return commandName; +function getCoreClientOptions(pipeline) { + var _a; + const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = tslib.__rest(_b, ["httpClient"]); + let httpClient = pipeline._coreHttpClient; + if (!httpClient) { + httpClient = v1Client ? coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient(); + pipeline._coreHttpClient = httpClient; + } + let corePipeline = pipeline._corePipeline; + if (!corePipeline) { + const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; + const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix + ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { + additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + logger: logger.info, + }, userAgentOptions: { + userAgentPrefix, + }, serializationOptions: { + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + }, deserializationOptions: { + parseXML: coreXml.parseXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + } })); + corePipeline.removePolicy({ phase: "Retry" }); + corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName }); + corePipeline.addPolicy(storageCorrectContentLengthPolicy()); + corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); + corePipeline.addPolicy(storageBrowserPolicy()); + const downlevelResults = processDownlevelPipeline(pipeline); + if (downlevelResults) { + corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : undefined); + } + const credential = getCredentialFromPipeline(pipeline); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + pipeline._corePipeline = corePipeline; + } + return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); +} +function getCredentialFromPipeline(pipeline) { + // see if we squirreled one away on the type itself + if (pipeline._credential) { + return pipeline._credential; + } + // if it came from another package, loop over the factories and look for one like before + let credential = new AnonymousCredential(); + for (const factory of pipeline.factories) { + if (coreAuth.isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + credential = factory.credential; + } + else if (isStorageSharedKeyCredential(factory)) { + return factory; + } + } + return credential; +} +function isStorageSharedKeyCredential(factory) { + if (factory instanceof StorageSharedKeyCredential) { + return true; } + return factory.constructor.name === "StorageSharedKeyCredential"; } -/** - * Receives a list of commands to run, executes them, then returns the outputs. - * If anything fails, an error is thrown. - * @internal - */ -async function runCommands(commands, timeout) { - const results = []; - for (const command of commands) { - const [file, ...parameters] = command; - const result = (await processUtils.execFile(file, parameters, { - encoding: "utf8", - timeout, - })); - results.push(result); +function isAnonymousCredential(factory) { + if (factory instanceof AnonymousCredential) { + return true; } - return results; + return factory.constructor.name === "AnonymousCredential"; } -/** - * Known PowerShell errors - * @internal - */ -const powerShellErrors = { - login: "Run Connect-AzAccount to login", - installed: "The specified module 'Az.Accounts' with version '2.2.0' was not loaded because no valid module file was found in any module directory", -}; -/** - * Messages to use when throwing in this credential. - * @internal - */ -const powerShellPublicErrorMessages = { - login: "Please run 'Connect-AzAccount' from PowerShell to authenticate before using this credential.", - installed: `The 'Az.Account' module >= 2.2.0 is not installed. Install the Azure Az PowerShell module with: "Install-Module -Name Az -Scope CurrentUser -Repository PSGallery -Force".`, - troubleshoot: `To troubleshoot, visit https://aka.ms/azsdk/js/identity/powershellcredential/troubleshoot.`, -}; -// PowerShell Azure User not logged in error check. -const isLoginError = (err) => err.message.match(`(.*)${powerShellErrors.login}(.*)`); -// Az Module not Installed in Azure PowerShell check. -const isNotInstalledError = (err) => err.message.match(powerShellErrors.installed); -/** - * The PowerShell commands to be tried, in order. - * - * @internal - */ -const commandStack = [formatCommand("pwsh")]; -if (isWindows) { - commandStack.push(formatCommand("powershell")); +function isCoreHttpBearerTokenFactory(factory) { + return coreAuth.isTokenCredential(factory.credential); } -/** - * This credential will use the currently logged-in user information from the - * Azure PowerShell module. 
To do so, it will read the user access token and - * expire time with Azure PowerShell command `Get-AzAccessToken -ResourceUrl {ResourceScope}` - */ -class AzurePowerShellCredential { - /** - * Creates an instance of the {@link AzurePowerShellCredential}. - * - * To use this credential: - * - Install the Azure Az PowerShell module with: - * `Install-Module -Name Az -Scope CurrentUser -Repository PSGallery -Force`. - * - You have already logged in to Azure PowerShell using the command - * `Connect-AzAccount` from the command line. - * - * @param options - Options, to optionally allow multi-tenant requests. - */ - constructor(options) { - if (options === null || options === void 0 ? void 0 : options.tenantId) { - checkTenantId(logger$a, options === null || options === void 0 ? void 0 : options.tenantId); - this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; - } - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - this.timeout = options === null || options === void 0 ? void 0 : options.processTimeoutInMs; - } - /** - * Gets the access token from Azure PowerShell - * @param resource - The resource to use when getting the token - */ - async getAzurePowerShellAccessToken(resource, tenantId, timeout) { - // Clone the stack to avoid mutating it while iterating - for (const powerShellCommand of [...commandStack]) { - try { - await runCommands([[powerShellCommand, "/?"]], timeout); - } - catch (e) { - // Remove this credential from the original stack so that we don't try it again. - commandStack.shift(); - continue; - } - let tenantSection = ""; - if (tenantId) { - tenantSection = `-TenantId "${tenantId}"`; - } - const results = await runCommands([ - [ - powerShellCommand, - "-NoProfile", - "-NonInteractive", - "-Command", - "Import-Module Az.Accounts -MinimumVersion 2.2.0 -PassThru", - ], - [ - powerShellCommand, - "-NoProfile", - "-NonInteractive", - "-Command", - `Get-AzAccessToken ${tenantSection} -ResourceUrl "${resource}" | ConvertTo-Json`, - ], - ]); - const result = results[1]; - try { - return JSON.parse(result); - } - catch (e) { - throw new Error(`Unable to parse the output of PowerShell. Received output: ${result}`); - } - } - throw new Error(`Unable to execute PowerShell. Ensure that it is installed in your system`); - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If the authentication cannot be performed through PowerShell, a {@link CredentialUnavailableError} will be thrown. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this TokenCredential implementation might make. - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async () => { - const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds); - const scope = typeof scopes === "string" ? 
scopes : scopes[0]; - if (tenantId) { - checkTenantId(logger$a, tenantId); - } - try { - ensureValidScopeForDevTimeCreds(scope, logger$a); - logger$a.getToken.info(`Using the scope ${scope}`); - const resource = getScopeResource(scope); - const response = await this.getAzurePowerShellAccessToken(resource, tenantId, this.timeout); - logger$a.getToken.info(formatSuccess(scopes)); - return { - token: response.Token, - expiresOnTimestamp: new Date(response.ExpiresOn).getTime(), - }; - } - catch (err) { - if (isNotInstalledError(err)) { - const error = new CredentialUnavailableError(powerShellPublicErrorMessages.installed); - logger$a.getToken.info(formatError(scope, error)); - throw error; - } - else if (isLoginError(err)) { - const error = new CredentialUnavailableError(powerShellPublicErrorMessages.login); - logger$a.getToken.info(formatError(scope, error)); - throw error; - } - const error = new CredentialUnavailableError(`${err}. ${powerShellPublicErrorMessages.troubleshoot}`); - logger$a.getToken.info(formatError(scope, error)); - throw error; - } - }); +function isStorageBrowserPolicyFactory(factory) { + if (factory instanceof StorageBrowserPolicyFactory) { + return true; } + return factory.constructor.name === "StorageBrowserPolicyFactory"; } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * @internal - */ -const logger$9 = credentialLogger("ChainedTokenCredential"); -/** - * Enables multiple `TokenCredential` implementations to be tried in order - * until one of the getToken methods returns an access token. - */ -class ChainedTokenCredential { - /** - * Creates an instance of ChainedTokenCredential using the given credentials. - * - * @param sources - `TokenCredential` implementations to be tried in order. - * - * Example usage: - * ```javascript - * const firstCredential = new ClientSecretCredential(tenantId, clientId, clientSecret); - * const secondCredential = new ClientSecretCredential(tenantId, anotherClientId, anotherSecret); - * const credentialChain = new ChainedTokenCredential(firstCredential, secondCredential); - * ``` - */ - constructor(...sources) { - this._sources = []; - this._sources = sources; - } - /** - * Returns the first access token returned by one of the chained - * `TokenCredential` implementations. Throws an {@link AggregateAuthenticationError} - * when one or more credentials throws an {@link AuthenticationError} and - * no credentials have returned an access token. - * - * This method is called automatically by Azure SDK client libraries. You may call this method - * directly, but you must also handle token caching and token refreshing. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * `TokenCredential` implementation might make. 
- */ - async getToken(scopes, options = {}) { - const { token } = await this.getTokenInternal(scopes, options); - return token; - } - async getTokenInternal(scopes, options = {}) { - let token = null; - let successfulCredential; - const errors = []; - return tracingClient.withSpan("ChainedTokenCredential.getToken", options, async (updatedOptions) => { - for (let i = 0; i < this._sources.length && token === null; i++) { - try { - token = await this._sources[i].getToken(scopes, updatedOptions); - successfulCredential = this._sources[i]; - } - catch (err) { - if (err.name === "CredentialUnavailableError" || - err.name === "AuthenticationRequiredError") { - errors.push(err); - } - else { - logger$9.getToken.info(formatError(scopes, err)); - throw err; - } - } - } - if (!token && errors.length > 0) { - const err = new AggregateAuthenticationError(errors, "ChainedTokenCredential authentication failed."); - logger$9.getToken.info(formatError(scopes, err)); - throw err; - } - logger$9.getToken.info(`Result for ${successfulCredential.constructor.name}: ${formatSuccess(scopes)}`); - if (token === null) { - throw new CredentialUnavailableError("Failed to retrieve a valid token"); - } - return { token, successfulCredential }; - }); +function isStorageRetryPolicyFactory(factory) { + if (factory instanceof StorageRetryPolicyFactory) { + return true; } + return factory.constructor.name === "StorageRetryPolicyFactory"; } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const readFileAsync = util.promisify(fs.readFile); -/** - * Tries to asynchronously load a certificate from the given path. - * - * @param configuration - Either the PEM value or the path to the certificate. - * @param sendCertificateChain - Option to include x5c header for SubjectName and Issuer name authorization. - * @returns - The certificate parts, or `undefined` if the certificate could not be loaded. - * @internal - */ -async function parseCertificate(configuration, sendCertificateChain) { - const certificateParts = {}; - const certificate = configuration - .certificate; - const certificatePath = configuration - .certificatePath; - certificateParts.certificateContents = - certificate || (await readFileAsync(certificatePath, "utf8")); - if (sendCertificateChain) { - certificateParts.x5c = certificateParts.certificateContents; - } - const certificatePattern = /(-+BEGIN CERTIFICATE-+)(\n\r?|\r\n?)([A-Za-z0-9+/\n\r]+=*)(\n\r?|\r\n?)(-+END CERTIFICATE-+)/g; - const publicKeys = []; - // Match all possible certificates, in the order they are in the file. These will form the chain that is used for x5c - let match; - do { - match = certificatePattern.exec(certificateParts.certificateContents); - if (match) { - publicKeys.push(match[3]); - } - } while (match); - if (publicKeys.length === 0) { - throw new Error("The file at the specified path does not contain a PEM-encoded certificate."); - } - certificateParts.thumbprint = crypto.createHash("sha1") - .update(Buffer.from(publicKeys[0], "base64")) - .digest("hex") - .toUpperCase(); - return certificateParts; +function isStorageTelemetryPolicyFactory(factory) { + return factory.constructor.name === "TelemetryPolicyFactory"; } -/** - * MSAL client certificate client. Calls to MSAL's confidential application's `acquireTokenByClientCredential` during `doGetToken`. 
- * @internal - */ -class MsalClientCertificate extends MsalNode { - constructor(options) { - super(options); - this.requiresConfidential = true; - this.configuration = options.configuration; - this.sendCertificateChain = options.sendCertificateChain; - } - // Changing the MSAL configuration asynchronously - async init(options) { - try { - const parts = await parseCertificate(this.configuration, this.sendCertificateChain); - let privateKey; - if (this.configuration.certificatePassword !== undefined) { - const privateKeyObject = crypto.createPrivateKey({ - key: parts.certificateContents, - passphrase: this.configuration.certificatePassword, - format: "pem", - }); - privateKey = privateKeyObject - .export({ - format: "pem", - type: "pkcs8", - }) - .toString(); - } - else { - privateKey = parts.certificateContents; - } - this.msalConfig.auth.clientCertificate = { - thumbprint: parts.thumbprint, - privateKey: privateKey, - x5c: parts.x5c, - }; - } - catch (error) { - this.logger.info(formatError("", error)); - throw error; - } - return super.init(options); - } - async doGetToken(scopes, options = {}) { - try { - const clientCredReq = { - scopes, - correlationId: options.correlationId, - azureRegion: this.azureRegion, - authority: options.authority, - claims: options.claims, +function isInjectorPolicyFactory(factory) { + return factory.constructor.name === "InjectorPolicyFactory"; +} +function isCoreHttpPolicyFactory(factory) { + const knownPolicies = [ + "GenerateClientRequestIdPolicy", + "TracingPolicy", + "LogPolicy", + "ProxyPolicy", + "DisableResponseDecompressionPolicy", + "KeepAlivePolicy", + "DeserializationPolicy", + ]; + const mockHttpClient = { + sendRequest: async (request) => { + return { + request, + headers: request.headers.clone(), + status: 500, }; - const result = await this.getApp("confidential", options.enableCae).acquireTokenByClientCredential(clientCredReq); - // Even though we're providing the same default in memory persistence cache that we use for DeviceCodeCredential, - // The Client Credential flow does not return the account information from the authentication service, - // so each time getToken gets called, we will have to acquire a new token through the service. - return this.handleResult(scopes, this.clientId, result || undefined); - } - catch (err) { - throw this.handleError(scopes, err, options); - } - } + }, + }; + const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, + }; + const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions); + const policyName = policyInstance.constructor.name; + // bundlers sometimes add a custom suffix to the class name to make it unique + return knownPolicies.some((knownPolicyName) => { + return policyName.startsWith(knownPolicyName); + }); } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const credentialName$2 = "ClientCertificateCredential"; -const logger$8 = credentialLogger(credentialName$2); -/** - * Enables authentication to Microsoft Entra ID using a PEM-encoded - * certificate that is assigned to an App Registration. More information - * on how to configure certificate authentication can be found here: - * - * https://learn.microsoft.com/en-us/azure/active-directory/develop/active-directory-certificate-credentials#register-your-certificate-with-azure-ad +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -class ClientCertificateCredential { - constructor(tenantId, clientId, certificatePathOrConfiguration, options = {}) { - if (!tenantId || !clientId) { - throw new Error(`${credentialName$2}: tenantId and clientId are required parameters.`); - } - this.tenantId = tenantId; - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - const configuration = Object.assign({}, (typeof certificatePathOrConfiguration === "string" - ? { - certificatePath: certificatePathOrConfiguration, - } - : certificatePathOrConfiguration)); - const certificate = configuration - .certificate; - const certificatePath = configuration.certificatePath; - if (!configuration || !(certificate || certificatePath)) { - throw new Error(`${credentialName$2}: Provide either a PEM certificate in string form, or the path to that certificate in the filesystem. To troubleshoot, visit https://aka.ms/azsdk/js/identity/serviceprincipalauthentication/troubleshoot.`); - } - if (certificate && certificatePath) { - throw new Error(`${credentialName$2}: To avoid unexpected behaviors, providing both the contents of a PEM certificate and the path to a PEM certificate is forbidden. To troubleshoot, visit https://aka.ms/azsdk/js/identity/serviceprincipalauthentication/troubleshoot.`); - } - this.msalFlow = new MsalClientCertificate(Object.assign(Object.assign({}, options), { configuration, - logger: logger$8, - clientId, - tenantId, sendCertificateChain: options.sendCertificateChain, tokenCredentialOptions: options })); - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${credentialName$2}.getToken`, options, async (newOptions) => { - newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$8); - const arrayScopes = Array.isArray(scopes) ? scopes : [scopes]; - return this.msalFlow.getToken(arrayScopes, newOptions); - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * MSAL client secret client. Calls to MSAL's confidential application's `acquireTokenByClientCredential` during `doGetToken`. - * @internal - */ -class MsalClientSecret extends MsalNode { - constructor(options) { - super(options); - this.requiresConfidential = true; - this.msalConfig.auth.clientSecret = options.clientSecret; - } - async doGetToken(scopes, options = {}) { - try { - const result = await this.getApp("confidential", options.enableCae).acquireTokenByClientCredential({ - scopes, - correlationId: options.correlationId, - azureRegion: this.azureRegion, - authority: options.authority, - claims: options.claims, - }); - // The Client Credential flow does not return an account, - // so each time getToken gets called, we will have to acquire a new token through the service. 
- return this.handleResult(scopes, this.clientId, result || undefined); - } - catch (err) { - throw this.handleError(scopes, err, options); - } - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const logger$7 = credentialLogger("ClientSecretCredential"); -/** - * Enables authentication to Microsoft Entra ID using a client secret - * that was generated for an App Registration. More information on how - * to configure a client secret can be found here: - * - * https://learn.microsoft.com/azure/active-directory/develop/quickstart-configure-app-access-web-apis#add-credentials-to-your-web-application - * - */ -class ClientSecretCredential { - /** - * Creates an instance of the ClientSecretCredential with the details - * needed to authenticate against Microsoft Entra ID with a client - * secret. - * - * @param tenantId - The Microsoft Entra tenant (directory) ID. - * @param clientId - The client (application) ID of an App Registration in the tenant. - * @param clientSecret - A client secret that was generated for the App Registration. - * @param options - Options for configuring the client which makes the authentication request. - */ - constructor(tenantId, clientId, clientSecret, options = {}) { - if (!tenantId || !clientId || !clientSecret) { - throw new Error("ClientSecretCredential: tenantId, clientId, and clientSecret are required parameters. To troubleshoot, visit https://aka.ms/azsdk/js/identity/serviceprincipalauthentication/troubleshoot."); - } - this.tenantId = tenantId; - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - this.msalFlow = new MsalClientSecret(Object.assign(Object.assign({}, options), { logger: logger$7, - clientId, - tenantId, - clientSecret, tokenCredentialOptions: options })); - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { - newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$7); - const arrayScopes = ensureScopes(scopes); - return this.msalFlow.getToken(arrayScopes, newOptions); - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * MSAL username and password client. Calls to the MSAL's public application's `acquireTokenByUsernamePassword` during `doGetToken`. - * @internal - */ -class MsalUsernamePassword extends MsalNode { - constructor(options) { - super(options); - this.username = options.username; - this.password = options.password; - } - async doGetToken(scopes, options) { - try { - const requestOptions = { - scopes, - username: this.username, - password: this.password, - correlationId: options === null || options === void 0 ? void 0 : options.correlationId, - authority: options === null || options === void 0 ? void 0 : options.authority, - claims: options === null || options === void 0 ? void 0 : options.claims, - }; - const result = await this.getApp("public", options === null || options === void 0 ? 
void 0 : options.enableCae).acquireTokenByUsernamePassword(requestOptions); - return this.handleResult(scopes, this.clientId, result || undefined); - } - catch (error) { - throw this.handleError(scopes, error, options); - } - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const logger$6 = credentialLogger("UsernamePasswordCredential"); -/** - * Enables authentication to Microsoft Entra ID with a user's - * username and password. This credential requires a high degree of - * trust so you should only use it when other, more secure credential - * types can't be used. - */ -class UsernamePasswordCredential { - /** - * Creates an instance of the UsernamePasswordCredential with the details - * needed to authenticate against Microsoft Entra ID with a username - * and password. - * - * @param tenantId - The Microsoft Entra tenant (directory). - * @param clientId - The client (application) ID of an App Registration in the tenant. - * @param username - The user account's e-mail address (user name). - * @param password - The user account's account password - * @param options - Options for configuring the client which makes the authentication request. - */ - constructor(tenantId, clientId, username, password, options = {}) { - if (!tenantId || !clientId || !username || !password) { - throw new Error("UsernamePasswordCredential: tenantId, clientId, username and password are required parameters. To troubleshoot, visit https://aka.ms/azsdk/js/identity/usernamepasswordcredential/troubleshoot."); - } - this.tenantId = tenantId; - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - this.msalFlow = new MsalUsernamePassword(Object.assign(Object.assign({}, options), { logger: logger$6, - clientId, - tenantId, - username, - password, tokenCredentialOptions: options || {} })); - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * If the user provided the option `disableAutomaticAuthentication`, - * once the token can't be retrieved silently, - * this method won't attempt to request user interaction to retrieve the token. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { - newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$6); - const arrayScopes = ensureScopes(scopes); - return this.msalFlow.getToken(arrayScopes, newOptions); - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Contains the list of all supported environment variable names so that an - * appropriate error message can be generated when no credentials can be - * configured. 
- * - * @internal - */ -const AllSupportedEnvironmentVariables = [ - "AZURE_TENANT_ID", - "AZURE_CLIENT_ID", - "AZURE_CLIENT_SECRET", - "AZURE_CLIENT_CERTIFICATE_PATH", - "AZURE_CLIENT_CERTIFICATE_PASSWORD", - "AZURE_USERNAME", - "AZURE_PASSWORD", - "AZURE_ADDITIONALLY_ALLOWED_TENANTS", -]; -function getAdditionallyAllowedTenants() { - var _a; - const additionallyAllowedValues = (_a = process.env.AZURE_ADDITIONALLY_ALLOWED_TENANTS) !== null && _a !== void 0 ? _a : ""; - return additionallyAllowedValues.split(";"); -} -const credentialName$1 = "EnvironmentCredential"; -const logger$5 = credentialLogger(credentialName$1); -/** - * Enables authentication to Microsoft Entra ID using a client secret or certificate, or as a user - * with a username and password. - */ -class EnvironmentCredential { - /** - * Creates an instance of the EnvironmentCredential class and decides what credential to use depending on the available environment variables. - * - * Required environment variables: - * - `AZURE_TENANT_ID`: The Microsoft Entra tenant (directory) ID. - * - `AZURE_CLIENT_ID`: The client (application) ID of an App Registration in the tenant. - * - * If setting the AZURE_TENANT_ID, then you can also set the additionally allowed tenants - * - `AZURE_ADDITIONALLY_ALLOWED_TENANTS`: For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens with a single semicolon delimited string. Use * to allow all tenants. - * - * Environment variables used for client credential authentication: - * - `AZURE_CLIENT_SECRET`: A client secret that was generated for the App Registration. - * - `AZURE_CLIENT_CERTIFICATE_PATH`: The path to a PEM certificate to use during the authentication, instead of the client secret. - * - `AZURE_CLIENT_CERTIFICATE_PASSWORD`: (optional) password for the certificate file. - * - * Alternatively, users can provide environment variables for username and password authentication: - * - `AZURE_USERNAME`: Username to authenticate with. - * - `AZURE_PASSWORD`: Password to authenticate with. - * - * If the environment variables required to perform the authentication are missing, a {@link CredentialUnavailableError} will be thrown. - * If the authentication fails, or if there's an unknown error, an {@link AuthenticationError} will be thrown. - * - * @param options - Options for configuring the client which makes the authentication request. 
- */ - constructor(options) { - // Keep track of any missing environment variables for error details - this._credential = undefined; - const assigned = processEnvVars(AllSupportedEnvironmentVariables).assigned.join(", "); - logger$5.info(`Found the following environment variables: ${assigned}`); - const tenantId = process.env.AZURE_TENANT_ID, clientId = process.env.AZURE_CLIENT_ID, clientSecret = process.env.AZURE_CLIENT_SECRET; - const additionallyAllowedTenantIds = getAdditionallyAllowedTenants(); - const newOptions = Object.assign(Object.assign({}, options), { additionallyAllowedTenantIds }); - if (tenantId) { - checkTenantId(logger$5, tenantId); - } - if (tenantId && clientId && clientSecret) { - logger$5.info(`Invoking ClientSecretCredential with tenant ID: ${tenantId}, clientId: ${clientId} and clientSecret: [REDACTED]`); - this._credential = new ClientSecretCredential(tenantId, clientId, clientSecret, newOptions); - return; - } - const certificatePath = process.env.AZURE_CLIENT_CERTIFICATE_PATH; - const certificatePassword = process.env.AZURE_CLIENT_CERTIFICATE_PASSWORD; - if (tenantId && clientId && certificatePath) { - logger$5.info(`Invoking ClientCertificateCredential with tenant ID: ${tenantId}, clientId: ${clientId} and certificatePath: ${certificatePath}`); - this._credential = new ClientCertificateCredential(tenantId, clientId, { certificatePath, certificatePassword }, newOptions); - return; - } - const username = process.env.AZURE_USERNAME; - const password = process.env.AZURE_PASSWORD; - if (tenantId && clientId && username && password) { - logger$5.info(`Invoking UsernamePasswordCredential with tenant ID: ${tenantId}, clientId: ${clientId} and username: ${username}`); - this._credential = new UsernamePasswordCredential(tenantId, clientId, username, password, newOptions); - } - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - Optional parameters. See {@link GetTokenOptions}. - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${credentialName$1}.getToken`, options, async (newOptions) => { - if (this._credential) { - try { - const result = await this._credential.getToken(scopes, newOptions); - logger$5.getToken.info(formatSuccess(scopes)); - return result; - } - catch (err) { - const authenticationError = new AuthenticationError(400, { - error: `${credentialName$1} authentication failed. To troubleshoot, visit https://aka.ms/azsdk/js/identity/environmentcredential/troubleshoot.`, - error_description: err.message.toString().split("More details:").join(""), - }); - logger$5.getToken.info(formatError(scopes, authenticationError)); - throw authenticationError; - } - } - throw new CredentialUnavailableError(`${credentialName$1} is unavailable. No underlying credential could be used. To troubleshoot, visit https://aka.ms/azsdk/js/identity/environmentcredential/troubleshoot.`); - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-/** - * Mockable reference to the Developer CLI credential cliCredentialFunctions - * @internal - */ -const developerCliCredentialInternals = { - /** - * @internal - */ - getSafeWorkingDir() { - if (process.platform === "win32") { - if (!process.env.SystemRoot) { - throw new Error("Azure Developer CLI credential expects a 'SystemRoot' environment variable"); - } - return process.env.SystemRoot; - } - else { - return "/bin"; - } +const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging", + }, + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics", + }, + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics", + }, + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule", + }, + }, + }, + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String", + }, + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + }, + }, + }, }, - /** - * Gets the access token from Azure Developer CLI - * @param scopes - The scopes to use when getting the token - * @internal - */ - async getAzdAccessToken(scopes, tenantId, timeout) { - let tenantSection = []; - if (tenantId) { - tenantSection = ["--tenant-id", tenantId]; - } - return new Promise((resolve, reject) => { - try { - child_process.execFile("azd", [ - "auth", - "token", - "--output", - "json", - ...scopes.reduce((previous, current) => previous.concat("--scope", current), []), - ...tenantSection, - ], { - cwd: developerCliCredentialInternals.getSafeWorkingDir(), - timeout, - }, (error, stdout, stderr) => { - resolve({ stdout, stderr, error }); - }); - } - catch (err) { - reject(err); - } - }); +}; +const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String", + }, + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean", + }, + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean", + }, + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean", + }, + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + }, }, }; -const logger$4 = credentialLogger("AzureDeveloperCliCredential"); -/** - * Azure Developer CLI is a command-line interface tool that allows developers to create, manage, and deploy - * resources in Azure. 
It's built on top of the Azure CLI and provides additional functionality specific - * to Azure developers. It allows users to authenticate as a user and/or a service principal against - * Microsoft Entra ID. The - * AzureDeveloperCliCredential authenticates in a development environment and acquires a token on behalf of - * the logged-in user or service principal in the Azure Developer CLI. It acts as the Azure Developer CLI logged in user or - * service principal and executes an Azure CLI command underneath to authenticate the application against - * Microsoft Entra ID. - * - *

Configure AzureDeveloperCliCredential

- * - * To use this credential, the developer needs to authenticate locally in Azure Developer CLI using one of the - * commands below: - * - *
    - *
- *   1. Run "azd auth login" in Azure Developer CLI to authenticate interactively as a user.
- *   2. Run "azd auth login --client-id clientID --client-secret clientSecret
- * --tenant-id tenantID" to authenticate as a service principal.
- * - * You may need to repeat this process after a certain time period, depending on the refresh token validity in your - * organization. Generally, the refresh token validity period is a few weeks to a few months. - * AzureDeveloperCliCredential will prompt you to sign in again. - */ -class AzureDeveloperCliCredential { - /** - * Creates an instance of the {@link AzureDeveloperCliCredential}. - * - * To use this credential, ensure that you have already logged - * in via the 'azd' tool using the command "azd auth login" from the commandline. - * - * @param options - Options, to optionally allow multi-tenant requests. - */ - constructor(options) { - if (options === null || options === void 0 ? void 0 : options.tenantId) { - checkTenantId(logger$4, options === null || options === void 0 ? void 0 : options.tenantId); - this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; - } - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - this.timeout = options === null || options === void 0 ? void 0 : options.processTimeoutInMs; - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async getToken(scopes, options = {}) { - const tenantId = processMultiTenantRequest(this.tenantId, options, this.additionallyAllowedTenantIds); - if (tenantId) { - checkTenantId(logger$4, tenantId); - } - let scopeList; - if (typeof scopes === "string") { - scopeList = [scopes]; - } - else { - scopeList = scopes; - } - logger$4.getToken.info(`Using the scopes ${scopes}`); - return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async () => { - var _a, _b, _c, _d; - try { - scopeList.forEach((scope) => { - ensureValidScopeForDevTimeCreds(scope, logger$4); - }); - const obj = await developerCliCredentialInternals.getAzdAccessToken(scopeList, tenantId, this.timeout); - const isNotLoggedInError = ((_a = obj.stderr) === null || _a === void 0 ? void 0 : _a.match("not logged in, run `azd login` to login")) || - ((_b = obj.stderr) === null || _b === void 0 ? void 0 : _b.match("not logged in, run `azd auth login` to login")); - const isNotInstallError = ((_c = obj.stderr) === null || _c === void 0 ? void 0 : _c.match("azd:(.*)not found")) || - ((_d = obj.stderr) === null || _d === void 0 ? void 0 : _d.startsWith("'azd' is not recognized")); - if (isNotInstallError || (obj.error && obj.error.code === "ENOENT")) { - const error = new CredentialUnavailableError("Azure Developer CLI couldn't be found. To mitigate this issue, see the troubleshooting guidelines at https://aka.ms/azsdk/js/identity/azdevclicredential/troubleshoot."); - logger$4.getToken.info(formatError(scopes, error)); - throw error; - } - if (isNotLoggedInError) { - const error = new CredentialUnavailableError("Please run 'azd auth login' from a command prompt to authenticate before using this credential. 
For more information, see the troubleshooting guidelines at https://aka.ms/azsdk/js/identity/azdevclicredential/troubleshoot."); - logger$4.getToken.info(formatError(scopes, error)); - throw error; - } - try { - const resp = JSON.parse(obj.stdout); - logger$4.getToken.info(formatSuccess(scopes)); - return { - token: resp.token, - expiresOnTimestamp: new Date(resp.expiresOn).getTime(), - }; - } - catch (e) { - if (obj.stderr) { - throw new CredentialUnavailableError(obj.stderr); - } - throw e; - } - } - catch (err) { - const error = err.name === "CredentialUnavailableError" - ? err - : new CredentialUnavailableError(err.message || "Unknown error while trying to retrieve the access token"); - logger$4.getToken.info(formatError(scopes, error)); - throw error; - } - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * A shim around ManagedIdentityCredential that adapts it to accept - * `DefaultAzureCredentialOptions`. - * - * @internal - */ -class DefaultManagedIdentityCredential extends ManagedIdentityCredential { - // Constructor overload with just the other default options - // Last constructor overload with Union of all options not required since the above two constructor overloads have optional properties - constructor(options) { - var _a, _b, _c; - const managedIdentityClientId = (_a = options === null || options === void 0 ? void 0 : options.managedIdentityClientId) !== null && _a !== void 0 ? _a : process.env.AZURE_CLIENT_ID; - const workloadIdentityClientId = (_b = options === null || options === void 0 ? void 0 : options.workloadIdentityClientId) !== null && _b !== void 0 ? _b : managedIdentityClientId; - const managedResourceId = options === null || options === void 0 ? void 0 : options.managedIdentityResourceId; - const workloadFile = process.env.AZURE_FEDERATED_TOKEN_FILE; - const tenantId = (_c = options === null || options === void 0 ? void 0 : options.tenantId) !== null && _c !== void 0 ? _c : process.env.AZURE_TENANT_ID; - // ManagedIdentityCredential throws if both the resourceId and the clientId are provided. - if (managedResourceId) { - const managedIdentityResourceIdOptions = Object.assign(Object.assign({}, options), { resourceId: managedResourceId }); - super(managedIdentityResourceIdOptions); - } - else if (workloadFile && workloadIdentityClientId) { - const workloadIdentityCredentialOptions = Object.assign(Object.assign({}, options), { tenantId: tenantId }); - super(workloadIdentityClientId, workloadIdentityCredentialOptions); - } - else if (managedIdentityClientId) { - const managedIdentityClientOptions = Object.assign(Object.assign({}, options), { clientId: managedIdentityClientId }); - super(managedIdentityClientOptions); - } - else { - super(options); - } - } -} -/** - * A shim around WorkloadIdentityCredential that adapts it to accept - * `DefaultAzureCredentialOptions`. - * - * @internal - */ -class DefaultWorkloadIdentityCredential extends WorkloadIdentityCredential { - // Constructor overload with just the other default options - // Last constructor overload with Union of all options not required since the above two constructor overloads have optional properties - constructor(options) { - var _a, _b, _c; - const managedIdentityClientId = (_a = options === null || options === void 0 ? void 0 : options.managedIdentityClientId) !== null && _a !== void 0 ? _a : process.env.AZURE_CLIENT_ID; - const workloadIdentityClientId = (_b = options === null || options === void 0 ? 
void 0 : options.workloadIdentityClientId) !== null && _b !== void 0 ? _b : managedIdentityClientId; - const workloadFile = process.env.AZURE_FEDERATED_TOKEN_FILE; - const tenantId = (_c = options === null || options === void 0 ? void 0 : options.tenantId) !== null && _c !== void 0 ? _c : process.env.AZURE_TENANT_ID; - if (workloadFile && workloadIdentityClientId) { - const workloadIdentityCredentialOptions = Object.assign(Object.assign({}, options), { tenantId, clientId: workloadIdentityClientId, tokenFilePath: workloadFile }); - super(workloadIdentityCredentialOptions); - } - else if (tenantId) { - const workloadIdentityClientTenantOptions = Object.assign(Object.assign({}, options), { tenantId }); - super(workloadIdentityClientTenantOptions); - } - else { - super(options); - } - } -} -class DefaultAzureDeveloperCliCredential extends AzureDeveloperCliCredential { - constructor(options) { - super(Object.assign({ processTimeoutInMs: options === null || options === void 0 ? void 0 : options.processTimeoutInMs }, options)); - } -} -class DefaultAzureCliCredential extends AzureCliCredential { - constructor(options) { - super(Object.assign({ processTimeoutInMs: options === null || options === void 0 ? void 0 : options.processTimeoutInMs }, options)); - } -} -class DefaultAzurePowershellCredential extends AzurePowerShellCredential { - constructor(options) { - super(Object.assign({ processTimeoutInMs: options === null || options === void 0 ? void 0 : options.processTimeoutInMs }, options)); - } -} -const defaultCredentials = [ - EnvironmentCredential, - DefaultWorkloadIdentityCredential, - DefaultManagedIdentityCredential, - DefaultAzureCliCredential, - DefaultAzurePowershellCredential, - DefaultAzureDeveloperCliCredential, -]; -/** - * Provides a default {@link ChainedTokenCredential} configuration that should - * work for most applications that use the Azure SDK. - */ -class DefaultAzureCredential extends ChainedTokenCredential { - constructor(options) { - super(...defaultCredentials.map((ctor) => new ctor(options))); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * A call to open(), but mockable - * @internal - */ -const interactiveBrowserMockable = { - open, +const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + days: { + constraints: { + InclusiveMinimum: 1, + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number", + }, + }, + }, + }, }; -/** - * This MSAL client sets up a web server to listen for redirect callbacks, then calls to the MSAL's public application's `acquireTokenByDeviceCode` during `doGetToken` - * to trigger the authentication flow, and then respond based on the values obtained from the redirect callback - * @internal - */ -class MsalOpenBrowser extends MsalNode { - constructor(options) { - var _a, _b; - super(options); - this.loginHint = options.loginHint; - this.errorTemplate = (_a = options.browserCustomizationOptions) === null || _a === void 0 ? void 0 : _a.errorMessage; - this.successTemplate = (_b = options.browserCustomizationOptions) === null || _b === void 0 ? 
void 0 : _b.successMessage; - this.logger = credentialLogger("Node.js MSAL Open Browser"); - } - async doGetToken(scopes, options) { - var _a; - try { - const interactiveRequest = { - openBrowser: async (url) => { - await interactiveBrowserMockable.open(url, { wait: true, newInstance: true }); +const Metrics = { + serializedName: "Metrics", + type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String", }, - scopes, - authority: options === null || options === void 0 ? void 0 : options.authority, - claims: options === null || options === void 0 ? void 0 : options.claims, - correlationId: options === null || options === void 0 ? void 0 : options.correlationId, - loginHint: this.loginHint, - errorTemplate: this.errorTemplate, - successTemplate: this.successTemplate, - }; - if (hasNativeBroker() && this.enableBroker) { - this.logger.verbose("Authentication will resume through the broker"); - if (this.parentWindowHandle) { - interactiveRequest.windowHandle = Buffer.from(this.parentWindowHandle); - } - else { - // error should have been thrown from within the constructor of InteractiveBrowserCredential - this.logger.warning("Parent window handle is not specified for the broker. This may cause unexpected behavior. Please provide the parentWindowHandle."); - } - if (this.enableMsaPassthrough) { - ((_a = interactiveRequest.tokenQueryParameters) !== null && _a !== void 0 ? _a : (interactiveRequest.tokenQueryParameters = {}))["msal_request_type"] = - "consumer_passthrough"; - } - } - if (hasNativeBroker() && !this.enableBroker) { - this.logger.verbose("Authentication will resume normally without the broker, since it's not enabled"); - } - const result = await this.getApp("public", options === null || options === void 0 ? void 0 : options.enableCae).acquireTokenInteractive(interactiveRequest); - if (result.fromNativeBroker) { - this.logger.verbose(`This result is returned from native broker`); - } - return this.handleResult(scopes, this.clientId, result || undefined); - } - catch (err) { - throw this.handleError(scopes, err, options); - } - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const logger$3 = credentialLogger("InteractiveBrowserCredential"); -/** - * Enables authentication to Microsoft Entra ID inside of the web browser - * using the interactive login flow. - */ -class InteractiveBrowserCredential { - /** - * Creates an instance of InteractiveBrowserCredential with the details needed. - * - * This credential uses the [Authorization Code Flow](https://learn.microsoft.com/azure/active-directory/develop/v2-oauth2-auth-code-flow). - * On Node.js, it will open a browser window while it listens for a redirect response from the authentication service. - * On browsers, it authenticates via popups. The `loginStyle` optional parameter can be set to `redirect` to authenticate by redirecting the user to an Azure secure login page, which then will redirect the user back to the web application where the authentication started. - * - * For Node.js, if a `clientId` is provided, the Microsoft Entra application will need to be configured to have a "Mobile and desktop applications" redirect endpoint. - * Follow our guide on [setting up Redirect URIs for Desktop apps that calls to web APIs](https://learn.microsoft.com/azure/active-directory/develop/scenario-desktop-app-registration#redirect-uris). 
- * - * @param options - Options for configuring the client which makes the authentication requests. - */ - constructor(options) { - var _a, _b, _c; - const redirectUri = typeof options.redirectUri === "function" - ? options.redirectUri() - : options.redirectUri || "http://localhost"; - this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - const ibcNodeOptions = options; - if ((_a = ibcNodeOptions === null || ibcNodeOptions === void 0 ? void 0 : ibcNodeOptions.brokerOptions) === null || _a === void 0 ? void 0 : _a.enabled) { - if (!((_b = ibcNodeOptions === null || ibcNodeOptions === void 0 ? void 0 : ibcNodeOptions.brokerOptions) === null || _b === void 0 ? void 0 : _b.parentWindowHandle)) { - throw new Error("In order to do WAM authentication, `parentWindowHandle` under `brokerOptions` is a required parameter"); - } - else { - this.msalFlow = new MsalOpenBrowser(Object.assign(Object.assign({}, options), { tokenCredentialOptions: options, logger: logger$3, - redirectUri, browserCustomizationOptions: ibcNodeOptions === null || ibcNodeOptions === void 0 ? void 0 : ibcNodeOptions.browserCustomizationOptions, brokerOptions: { - enabled: true, - parentWindowHandle: ibcNodeOptions.brokerOptions.parentWindowHandle, - legacyEnableMsaPassthrough: (_c = ibcNodeOptions.brokerOptions) === null || _c === void 0 ? void 0 : _c.legacyEnableMsaPassthrough, - } })); - } - } - else { - this.msalFlow = new MsalOpenBrowser(Object.assign(Object.assign({}, options), { tokenCredentialOptions: options, logger: logger$3, - redirectUri, browserCustomizationOptions: ibcNodeOptions === null || ibcNodeOptions === void 0 ? void 0 : ibcNodeOptions.browserCustomizationOptions })); - } - this.disableAutomaticAuthentication = options === null || options === void 0 ? void 0 : options.disableAutomaticAuthentication; - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * If the user provided the option `disableAutomaticAuthentication`, - * once the token can't be retrieved silently, - * this method won't attempt to request user interaction to retrieve the token. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { - newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$3); - const arrayScopes = ensureScopes(scopes); - return this.msalFlow.getToken(arrayScopes, Object.assign(Object.assign({}, newOptions), { disableAutomaticAuthentication: this.disableAutomaticAuthentication })); - }); - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * If the token can't be retrieved silently, this method will require user interaction to retrieve the token. 
- * - * On Node.js, this credential has [Proof Key for Code Exchange (PKCE)](https://datatracker.ietf.org/doc/html/rfc7636) enabled by default. - * PKCE is a security feature that mitigates authentication code interception attacks. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async authenticate(scopes, options = {}) { - return tracingClient.withSpan(`${this.constructor.name}.authenticate`, options, async (newOptions) => { - const arrayScopes = ensureScopes(scopes); - await this.msalFlow.getToken(arrayScopes, newOptions); - return this.msalFlow.getActiveAccount(); - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * MSAL device code client. Calls to the MSAL's public application's `acquireTokenByDeviceCode` during `doGetToken`. - * @internal - */ -class MsalDeviceCode extends MsalNode { - constructor(options) { - super(options); - this.userPromptCallback = options.userPromptCallback; - } - async doGetToken(scopes, options) { - try { - const requestOptions = { - deviceCodeCallback: this.userPromptCallback, - scopes, - cancel: false, - correlationId: options === null || options === void 0 ? void 0 : options.correlationId, - authority: options === null || options === void 0 ? void 0 : options.authority, - claims: options === null || options === void 0 ? void 0 : options.claims, - }; - const promise = this.getApp("public", options === null || options === void 0 ? void 0 : options.enableCae).acquireTokenByDeviceCode(requestOptions); - const deviceResponse = await this.withCancellation(promise, options === null || options === void 0 ? void 0 : options.abortSignal, () => { - requestOptions.cancel = true; - }); - return this.handleResult(scopes, this.clientId, deviceResponse || undefined); - } - catch (error) { - throw this.handleError(scopes, error, options); - } - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const logger$2 = credentialLogger("DeviceCodeCredential"); -/** - * Method that logs the user code from the DeviceCodeCredential. - * @param deviceCodeInfo - The device code. - */ -function defaultDeviceCodePromptCallback(deviceCodeInfo) { - console.log(deviceCodeInfo.message); -} -/** - * Enables authentication to Microsoft Entra ID using a device code - * that the user can enter into https://microsoft.com/devicelogin. - */ -class DeviceCodeCredential { - /** - * Creates an instance of DeviceCodeCredential with the details needed - * to initiate the device code authorization flow with Microsoft Entra ID. - * - * A message will be logged, giving users a code that they can use to authenticate once they go to https://microsoft.com/devicelogin - * - * Developers can configure how this message is shown by passing a custom `userPromptCallback`: - * - * ```js - * const credential = new DeviceCodeCredential({ - * tenantId: env.AZURE_TENANT_ID, - * clientId: env.AZURE_CLIENT_ID, - * userPromptCallback: (info) => { - * console.log("CUSTOMIZED PROMPT CALLBACK", info.message); - * } - * }); - * ``` - * - * @param options - Options for configuring the client which makes the authentication requests. - */ - constructor(options) { - this.tenantId = options === null || options === void 0 ? void 0 : options.tenantId; - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? 
void 0 : options.additionallyAllowedTenants); - this.msalFlow = new MsalDeviceCode(Object.assign(Object.assign({}, options), { logger: logger$2, userPromptCallback: (options === null || options === void 0 ? void 0 : options.userPromptCallback) || defaultDeviceCodePromptCallback, tokenCredentialOptions: options || {} })); - this.disableAutomaticAuthentication = options === null || options === void 0 ? void 0 : options.disableAutomaticAuthentication; - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * If the user provided the option `disableAutomaticAuthentication`, - * once the token can't be retrieved silently, - * this method won't attempt to request user interaction to retrieve the token. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { - newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger$2); - const arrayScopes = ensureScopes(scopes); - return this.msalFlow.getToken(arrayScopes, Object.assign(Object.assign({}, newOptions), { disableAutomaticAuthentication: this.disableAutomaticAuthentication })); - }); - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * If the token can't be retrieved silently, this method will require user interaction to retrieve the token. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async authenticate(scopes, options = {}) { - return tracingClient.withSpan(`${this.constructor.name}.authenticate`, options, async (newOptions) => { - const arrayScopes = Array.isArray(scopes) ? scopes : [scopes]; - await this.msalFlow.getToken(arrayScopes, newOptions); - return this.msalFlow.getActiveAccount(); - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * This MSAL client sets up a web server to listen for redirect callbacks, then calls to the MSAL's public application's `acquireTokenByDeviceCode` during `doGetToken` - * to trigger the authentication flow, and then respond based on the values obtained from the redirect callback - * @internal - */ -class MsalAuthorizationCode extends MsalNode { - constructor(options) { - super(options); - this.logger = credentialLogger("Node.js MSAL Authorization Code"); - this.redirectUri = options.redirectUri; - this.authorizationCode = options.authorizationCode; - if (options.clientSecret) { - this.msalConfig.auth.clientSecret = options.clientSecret; - } - } - async getAuthCodeUrl(options) { - await this.init(); - return this.getApp("confidentialFirst", options.enableCae).getAuthCodeUrl({ - scopes: options.scopes, - redirectUri: options.redirectUri, - }); - } - async doGetToken(scopes, options) { - try { - const result = await this.getApp("confidentialFirst", options === null || options === void 0 ? 
void 0 : options.enableCae).acquireTokenByCode({ - scopes, - redirectUri: this.redirectUri, - code: this.authorizationCode, - correlationId: options === null || options === void 0 ? void 0 : options.correlationId, - authority: options === null || options === void 0 ? void 0 : options.authority, - claims: options === null || options === void 0 ? void 0 : options.claims, - }); - // The Client Credential flow does not return an account, - // so each time getToken gets called, we will have to acquire a new token through the service. - return this.handleResult(scopes, this.clientId, result || undefined); - } - catch (err) { - throw this.handleError(scopes, err, options); - } - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const logger$1 = credentialLogger("AuthorizationCodeCredential"); -/** - * Enables authentication to Microsoft Entra ID using an authorization code - * that was obtained through the authorization code flow, described in more detail - * in the Microsoft Entra ID documentation: - * - * https://learn.microsoft.com/azure/active-directory/develop/v2-oauth2-auth-code-flow - */ -class AuthorizationCodeCredential { - /** - * @hidden - * @internal - */ - constructor(tenantId, clientId, clientSecretOrAuthorizationCode, authorizationCodeOrRedirectUri, redirectUriOrOptions, options) { - checkTenantId(logger$1, tenantId); - let clientSecret = clientSecretOrAuthorizationCode; - if (typeof redirectUriOrOptions === "string") { - // the clientId+clientSecret constructor - this.authorizationCode = authorizationCodeOrRedirectUri; - this.redirectUri = redirectUriOrOptions; - // in this case, options are good as they come - } - else { - // clientId only - this.authorizationCode = clientSecretOrAuthorizationCode; - this.redirectUri = authorizationCodeOrRedirectUri; - clientSecret = undefined; - options = redirectUriOrOptions; - } - // TODO: Validate tenant if provided - this.tenantId = tenantId; - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(options === null || options === void 0 ? void 0 : options.additionallyAllowedTenants); - this.msalFlow = new MsalAuthorizationCode(Object.assign(Object.assign({}, options), { clientSecret, - clientId, - tenantId, tokenCredentialOptions: options || {}, logger: logger$1, redirectUri: this.redirectUri, authorizationCode: this.authorizationCode })); - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure any requests this - * TokenCredential implementation might make. - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${this.constructor.name}.getToken`, options, async (newOptions) => { - const tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds); - newOptions.tenantId = tenantId; - const arrayScopes = ensureScopes(scopes); - return this.msalFlow.getToken(arrayScopes, Object.assign(Object.assign({}, newOptions), { disableAutomaticAuthentication: this.disableAutomaticAuthentication })); - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * MSAL on behalf of flow. Calls to MSAL's confidential application's `acquireTokenOnBehalfOf` during `doGetToken`. 
- * @internal - */ -class MsalOnBehalfOf extends MsalNode { - constructor(options) { - super(options); - this.logger.info("Initialized MSAL's On-Behalf-Of flow"); - this.requiresConfidential = true; - this.userAssertionToken = options.userAssertionToken; - this.certificatePath = options.certificatePath; - this.sendCertificateChain = options.sendCertificateChain; - this.clientSecret = options.clientSecret; - } - // Changing the MSAL configuration asynchronously - async init(options) { - if (this.certificatePath) { - try { - const parts = await parseCertificate({ certificatePath: this.certificatePath }, this.sendCertificateChain); - this.msalConfig.auth.clientCertificate = { - thumbprint: parts.thumbprint, - privateKey: parts.certificateContents, - x5c: parts.x5c, - }; - } - catch (error) { - this.logger.info(formatError("", error)); - throw error; - } - } - else { - this.msalConfig.auth.clientSecret = this.clientSecret; - } - return super.init(options); - } - async doGetToken(scopes, options = {}) { - try { - const result = await this.getApp("confidential", options.enableCae).acquireTokenOnBehalfOf({ - scopes, - correlationId: options.correlationId, - authority: options.authority, - claims: options.claims, - oboAssertion: this.userAssertionToken, - }); - return this.handleResult(scopes, this.clientId, result || undefined); - } - catch (err) { - throw this.handleError(scopes, err, options); - } - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const credentialName = "OnBehalfOfCredential"; -const logger = credentialLogger(credentialName); -/** - * Enables authentication to Microsoft Entra ID using the [On Behalf Of flow](https://learn.microsoft.com/azure/active-directory/develop/v2-oauth2-on-behalf-of-flow). - */ -class OnBehalfOfCredential { - constructor(options) { - this.options = options; - const { clientSecret } = options; - const { certificatePath } = options; - const { tenantId, clientId, userAssertionToken, additionallyAllowedTenants: additionallyAllowedTenantIds, } = options; - if (!tenantId || !clientId || !(clientSecret || certificatePath) || !userAssertionToken) { - throw new Error(`${credentialName}: tenantId, clientId, clientSecret (or certificatePath) and userAssertionToken are required parameters.`); - } - this.tenantId = tenantId; - this.additionallyAllowedTenantIds = resolveAdditionallyAllowedTenantIds(additionallyAllowedTenantIds); - this.msalFlow = new MsalOnBehalfOf(Object.assign(Object.assign({}, this.options), { logger, tokenCredentialOptions: this.options })); - } - /** - * Authenticates with Microsoft Entra ID and returns an access token if successful. - * If authentication fails, a {@link CredentialUnavailableError} will be thrown with the details of the failure. - * - * @param scopes - The list of scopes for which the token will have access. - * @param options - The options used to configure the underlying network requests. - */ - async getToken(scopes, options = {}) { - return tracingClient.withSpan(`${credentialName}.getToken`, options, async (newOptions) => { - newOptions.tenantId = processMultiTenantRequest(this.tenantId, newOptions, this.additionallyAllowedTenantIds, logger); - const arrayScopes = ensureScopes(scopes); - return this.msalFlow.getToken(arrayScopes, newOptions); - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Returns a new instance of the {@link DefaultAzureCredential}. 
- */ -function getDefaultAzureCredential() { - return new DefaultAzureCredential(); -} - -exports.AggregateAuthenticationError = AggregateAuthenticationError; -exports.AggregateAuthenticationErrorName = AggregateAuthenticationErrorName; -exports.AuthenticationError = AuthenticationError; -exports.AuthenticationErrorName = AuthenticationErrorName; -exports.AuthenticationRequiredError = AuthenticationRequiredError; -exports.AuthorizationCodeCredential = AuthorizationCodeCredential; -exports.AzureCliCredential = AzureCliCredential; -exports.AzureDeveloperCliCredential = AzureDeveloperCliCredential; -exports.AzurePowerShellCredential = AzurePowerShellCredential; -exports.ChainedTokenCredential = ChainedTokenCredential; -exports.ClientAssertionCredential = ClientAssertionCredential; -exports.ClientCertificateCredential = ClientCertificateCredential; -exports.ClientSecretCredential = ClientSecretCredential; -exports.CredentialUnavailableError = CredentialUnavailableError; -exports.CredentialUnavailableErrorName = CredentialUnavailableErrorName; -exports.DefaultAzureCredential = DefaultAzureCredential; -exports.DeviceCodeCredential = DeviceCodeCredential; -exports.EnvironmentCredential = EnvironmentCredential; -exports.InteractiveBrowserCredential = InteractiveBrowserCredential; -exports.ManagedIdentityCredential = ManagedIdentityCredential; -exports.OnBehalfOfCredential = OnBehalfOfCredential; -exports.UsernamePasswordCredential = UsernamePasswordCredential; -exports.VisualStudioCodeCredential = VisualStudioCodeCredential; -exports.WorkloadIdentityCredential = WorkloadIdentityCredential; -exports.deserializeAuthenticationRecord = deserializeAuthenticationRecord; -exports.getDefaultAzureCredential = getDefaultAzureCredential; -exports.logger = logger$n; -exports.serializeAuthenticationRecord = serializeAuthenticationRecord; -exports.useIdentityPlugin = useIdentityPlugin; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 3233: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var os = __nccwpck_require__(22037); -var util = __nccwpck_require__(73837); - -function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } - -var util__default = /*#__PURE__*/_interopDefaultLegacy(util); - -// Copyright (c) Microsoft Corporation. -function log(message, ...args) { - process.stderr.write(`${util__default["default"].format(message, ...args)}${os.EOL}`); -} - -// Copyright (c) Microsoft Corporation. 
-const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; -let enabledString; -let enabledNamespaces = []; -let skippedNamespaces = []; -const debuggers = []; -if (debugEnvVariable) { - enable(debugEnvVariable); -} -const debugObj = Object.assign((namespace) => { - return createDebugger(namespace); -}, { - enable, - enabled, - disable, - log, -}); -function enable(namespaces) { - enabledString = namespaces; - enabledNamespaces = []; - skippedNamespaces = []; - const wildcard = /\*/g; - const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); - for (const ns of namespaceList) { - if (ns.startsWith("-")) { - skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); - } - else { - enabledNamespaces.push(new RegExp(`^${ns}$`)); - } - } - for (const instance of debuggers) { - instance.enabled = enabled(instance.namespace); - } -} -function enabled(namespace) { - if (namespace.endsWith("*")) { - return true; - } - for (const skipped of skippedNamespaces) { - if (skipped.test(namespace)) { - return false; - } - } - for (const enabledNamespace of enabledNamespaces) { - if (enabledNamespace.test(namespace)) { - return true; - } - } - return false; -} -function disable() { - const result = enabledString || ""; - enable(""); - return result; -} -function createDebugger(namespace) { - const newDebugger = Object.assign(debug, { - enabled: enabled(namespace), - destroy, - log: debugObj.log, - namespace, - extend, - }); - function debug(...args) { - if (!newDebugger.enabled) { - return; - } - if (args.length > 0) { - args[0] = `${namespace} ${args[0]}`; - } - newDebugger.log(...args); - } - debuggers.push(newDebugger); - return newDebugger; -} -function destroy() { - const index = debuggers.indexOf(this); - if (index >= 0) { - debuggers.splice(index, 1); - return true; - } - return false; -} -function extend(namespace) { - const newDebugger = createDebugger(`${this.namespace}:${namespace}`); - newDebugger.log = this.log; - return newDebugger; -} -var debug = debugObj; - -// Copyright (c) Microsoft Corporation. -const registeredLoggers = new Set(); -const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; -let azureLogLevel; -/** - * The AzureLogger provides a mechanism for overriding where logs are output to. - * By default, logs are sent to stderr. - * Override the `log` method to redirect logs to another location. - */ -const AzureLogger = debug("azure"); -AzureLogger.log = (...args) => { - debug.log(...args); + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean", + }, + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + }, + }, }; -const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; -if (logLevelFromEnv) { - // avoid calling setLogLevel because we don't want a mis-set environment variable to crash - if (isAzureLogLevel(logLevelFromEnv)) { - setLogLevel(logLevelFromEnv); - } - else { - console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); - } -} -/** - * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. 
- * @param level - The log level to enable for logging. - * Options from most verbose to least verbose are: - * - verbose - * - info - * - warning - * - error - */ -function setLogLevel(level) { - if (level && !isAzureLogLevel(level)) { - throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); - } - azureLogLevel = level; - const enabledNamespaces = []; - for (const logger of registeredLoggers) { - if (shouldEnable(logger)) { - enabledNamespaces.push(logger.namespace); - } - } - debug.enable(enabledNamespaces.join(",")); -} -/** - * Retrieves the currently specified log level. - */ -function getLogLevel() { - return azureLogLevel; -} -const levelMap = { - verbose: 400, - info: 300, - warning: 200, - error: 100, -}; -/** - * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. - * @param namespace - The name of the SDK package. - * @hidden - */ -function createClientLogger(namespace) { - const clientRootLogger = AzureLogger.extend(namespace); - patchLogMethod(AzureLogger, clientRootLogger); - return { - error: createLogger(clientRootLogger, "error"), - warning: createLogger(clientRootLogger, "warning"), - info: createLogger(clientRootLogger, "info"), - verbose: createLogger(clientRootLogger, "verbose"), - }; -} -function patchLogMethod(parent, child) { - child.log = (...args) => { - parent.log(...args); - }; -} -function createLogger(parent, level) { - const logger = Object.assign(parent.extend(level), { - level, - }); - patchLogMethod(parent, logger); - if (shouldEnable(logger)) { - const enabledNamespaces = debug.disable(); - debug.enable(enabledNamespaces + "," + logger.namespace); - } - registeredLoggers.add(logger); - return logger; -} -function shouldEnable(logger) { - return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]); -} -function isAzureLogLevel(logLevel) { - return AZURE_LOG_LEVELS.includes(logLevel); -} - -exports.AzureLogger = AzureLogger; -exports.createClientLogger = createClientLogger; -exports.getLogLevel = getLogLevel; -exports.setLogLevel = setLogLevel; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8786: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); - -var _v = _interopRequireDefault(__nccwpck_require__(12040)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(86856)); - -var _v3 = 
_interopRequireDefault(__nccwpck_require__(3661)); - -var _v4 = _interopRequireDefault(__nccwpck_require__(45233)); - -var _nil = _interopRequireDefault(__nccwpck_require__(56619)); - -var _version = _interopRequireDefault(__nccwpck_require__(84721)); - -var _validate = _interopRequireDefault(__nccwpck_require__(8392)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(82127)); - -var _parse = _interopRequireDefault(__nccwpck_require__(20115)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/***/ }), - -/***/ 99057: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('md5').update(bytes).digest(); -} - -var _default = md5; -exports["default"] = _default; - -/***/ }), - -/***/ 56619: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; - -/***/ }), - -/***/ 20115: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(8392)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ - - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ - - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ - - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
- - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) - - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} - -var _default = parse; -exports["default"] = _default; - -/***/ }), - -/***/ 61134: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; - -/***/ }), - -/***/ 58634: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate - -let poolPtr = rnds8Pool.length; - -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); - - poolPtr = 0; - } - - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} - -/***/ }), - -/***/ 4764: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('sha1').update(bytes).digest(); -} - -var _default = sha1; -exports["default"] = _default; - -/***/ }), - -/***/ 82127: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(8392)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; - -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); -} - -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields - - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } - - return uuid; -} - -var _default = stringify; -exports["default"] = _default; - -/***/ }), - -/***/ 12040: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(58634)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(82127)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; - -let _clockseq; // Previous uuid creation time - - -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 - - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - - - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested - - - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - - b[i++] = clockseq & 0xff; // `node` - - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf || (0, _stringify.default)(b); -} - -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 86856: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(40432)); - -var _md = _interopRequireDefault(__nccwpck_require__(99057)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; - -/***/ }), - -/***/ 40432: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; - -var _stringify = _interopRequireDefault(__nccwpck_require__(82127)); - -var _parse = _interopRequireDefault(__nccwpck_require__(20115)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - const bytes = []; - - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); - } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); - } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... 
value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } - - return buf; - } - - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} - -/***/ }), - -/***/ 3661: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(58634)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(82127)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function v4(options, buf, offset) { - options = options || {}; - - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - - - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } - - return buf; - } - - return (0, _stringify.default)(rnds); -} - -var _default = v4; -exports["default"] = _default; - -/***/ }), - -/***/ 45233: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(40432)); - -var _sha = _interopRequireDefault(__nccwpck_require__(4764)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; - -/***/ }), - -/***/ 8392: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _regex = _interopRequireDefault(__nccwpck_require__(61134)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); -} - -var _default = validate; -exports["default"] = _default; - -/***/ }), - -/***/ 84721: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(8392)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - return parseInt(uuid.substr(14, 1), 16); -} - -var _default = version; -exports["default"] = _default; - -/***/ }), - -/***/ 84100: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var coreHttp = __nccwpck_require__(24607); -var tslib = __nccwpck_require__(70890); -var coreTracing = __nccwpck_require__(18486); -var logger$1 = __nccwpck_require__(3233); -var abortController = __nccwpck_require__(52557); -var os = __nccwpck_require__(22037); -var crypto = __nccwpck_require__(6113); -var stream = __nccwpck_require__(12781); -__nccwpck_require__(74559); -var coreLro = __nccwpck_require__(27094); -var events = __nccwpck_require__(82361); -var fs = __nccwpck_require__(57147); -var util = __nccwpck_require__(73837); - -function _interopNamespace(e) { - if (e && e.__esModule) return e; - var n = Object.create(null); - if (e) { - Object.keys(e).forEach(function (k) { - if (k !== 'default') { - var d = Object.getOwnPropertyDescriptor(e, k); - Object.defineProperty(n, k, d.get ? d : { - enumerable: true, - get: function () { return e[k]; } - }); - } - }); - } - n["default"] = e; - return Object.freeze(n); -} - -var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); -var os__namespace = /*#__PURE__*/_interopNamespace(os); -var fs__namespace = /*#__PURE__*/_interopNamespace(fs); -var util__namespace = /*#__PURE__*/_interopNamespace(util); - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
[dist/index.js — generated bundle, remainder of hunk omitted: this portion of the diff re-vendors the @azure/storage-blob mapper definitions bundled into dist/index.js (BlobServiceProperties, Logging, RetentionPolicy, Metrics, CorsRule, StaticWebsite, StorageError, the container/blob list-segment and blob property models, and the Service_*/Container_*/Blob_* request and response header mappers). Beyond formatting churn from the upstream build (trailing commas throughout), the substantive upstream changes visible in the hunk are: a new `authenticationErrorDetail` property on `StorageError`, a new `isHierarchicalNamespaceEnabled` header on `ContainerGetAccountInfoHeaders`, `headerCollectionPrefix` moved alongside `serializedName` in the `x-ms-meta-`/`x-ms-or-` dictionary mappers, and `parquetTextConfiguration` retyped from `any` to a Dictionary of `any`.]
+ }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { - name: "Boolean" - } + name: "Boolean", + }, }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyExpiresOn: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, contentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } - } - } - } + name: "ByteArray", + }, + }, + }, + }, }; const BlobDownloadExceptionHeaders = { serializedName: "Blob_downloadExceptionHeaders", @@ -80672,11 +74827,11 @@ const BlobDownloadExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetPropertiesHeaders = { serializedName: "Blob_getPropertiesHeaders", @@ -80688,113 +74843,113 @@ const BlobGetPropertiesHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, createdOn: { serializedName: "x-ms-creation-time", xmlName: "x-ms-creation-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, metadata: { serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-meta-" }, objectReplicationPolicyId: { serializedName: "x-ms-or-policy-id", xmlName: "x-ms-or-policy-id", type: { - name: "String" - } + name: "String", + }, }, objectReplicationRules: { serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", xmlName: "x-ms-or", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-or-" }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, copyCompletedOn: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { - name: "String" - } + name: "String", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { - name: "String" - } + name: "String", + }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, 
}, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, isIncrementalCopy: { serializedName: "x-ms-incremental-copy", xmlName: "x-ms-incremental-copy", type: { - name: "Boolean" - } + name: "Boolean", + }, }, destinationSnapshot: { serializedName: "x-ms-copy-destination-snapshot", xmlName: "x-ms-copy-destination-snapshot", type: { - name: "String" - } + name: "String", + }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, leaseState: { serializedName: "x-ms-lease-state", @@ -80806,253 +74961,253 @@ const BlobGetPropertiesHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { - name: "Number" - } + name: "Number", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { - name: "String" - } + name: "String", + }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { - name: "String" - } + name: "String", + }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { - name: "String" - } + name: "String", + }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { - name: "String" - } + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { - name: "String" - } + name: "String", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: 
"x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, accessTier: { serializedName: "x-ms-access-tier", xmlName: "x-ms-access-tier", type: { - name: "String" - } + name: "String", + }, }, accessTierInferred: { serializedName: "x-ms-access-tier-inferred", xmlName: "x-ms-access-tier-inferred", type: { - name: "Boolean" - } + name: "Boolean", + }, }, archiveStatus: { serializedName: "x-ms-archive-status", xmlName: "x-ms-archive-status", type: { - name: "String" - } + name: "String", + }, }, accessTierChangedOn: { serializedName: "x-ms-access-tier-change-time", xmlName: "x-ms-access-tier-change-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, isCurrentVersion: { serializedName: "x-ms-is-current-version", xmlName: "x-ms-is-current-version", type: { - name: "Boolean" - } + name: "Boolean", + }, }, tagCount: { serializedName: "x-ms-tag-count", xmlName: "x-ms-tag-count", type: { - name: "Number" - } + name: "Number", + }, }, expiresOn: { serializedName: "x-ms-expiry-time", xmlName: "x-ms-expiry-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { - name: "Boolean" - } + name: "Boolean", + }, }, rehydratePriority: { serializedName: "x-ms-rehydrate-priority", xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", - allowedValues: ["High", "Standard"] - } + allowedValues: ["High", "Standard"], + }, }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyExpiresOn: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetPropertiesExceptionHeaders = { serializedName: "Blob_getPropertiesExceptionHeaders", @@ -81064,11 +75219,11 @@ const BlobGetPropertiesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobDeleteHeaders = { serializedName: "Blob_deleteHeaders", @@ -81080,39 +75235,39 @@ const BlobDeleteHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", 
xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobDeleteExceptionHeaders = { serializedName: "Blob_deleteExceptionHeaders", @@ -81124,11 +75279,11 @@ const BlobDeleteExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobUndeleteHeaders = { serializedName: "Blob_undeleteHeaders", @@ -81140,39 +75295,39 @@ const BlobUndeleteHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobUndeleteExceptionHeaders = { serializedName: "Blob_undeleteExceptionHeaders", @@ -81184,11 +75339,11 @@ const BlobUndeleteExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetExpiryHeaders = { serializedName: "Blob_setExpiryHeaders", @@ -81200,46 +75355,46 @@ const BlobSetExpiryHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobSetExpiryExceptionHeaders = { serializedName: "Blob_setExpiryExceptionHeaders", @@ -81251,11 +75406,11 @@ const BlobSetExpiryExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetHttpHeadersHeaders = { serializedName: "Blob_setHttpHeadersHeaders", @@ -81267,60 +75422,60 @@ const BlobSetHttpHeadersHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, 
version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetHttpHeadersExceptionHeaders = { serializedName: "Blob_setHttpHeadersExceptionHeaders", @@ -81332,11 +75487,11 @@ const BlobSetHttpHeadersExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetImmutabilityPolicyHeaders = { serializedName: "Blob_setImmutabilityPolicyHeaders", @@ -81348,47 +75503,47 @@ const BlobSetImmutabilityPolicyHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyExpiry: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - } - } - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + }, + }, }; const BlobSetImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", @@ -81400,11 +75555,11 @@ const BlobSetImmutabilityPolicyExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobDeleteImmutabilityPolicyHeaders = { serializedName: "Blob_deleteImmutabilityPolicyHeaders", @@ -81416,32 +75571,32 @@ const BlobDeleteImmutabilityPolicyHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobDeleteImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", @@ -81453,11 +75608,11 @@ const BlobDeleteImmutabilityPolicyExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetLegalHoldHeaders = { serializedName: "Blob_setLegalHoldHeaders", @@ -81469,39 +75624,39 @@ const BlobSetLegalHoldHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { 
serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; const BlobSetLegalHoldExceptionHeaders = { serializedName: "Blob_setLegalHoldExceptionHeaders", @@ -81513,11 +75668,11 @@ const BlobSetLegalHoldExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetMetadataHeaders = { serializedName: "Blob_setMetadataHeaders", @@ -81529,81 +75684,81 @@ const BlobSetMetadataHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetMetadataExceptionHeaders = { serializedName: "Blob_setMetadataExceptionHeaders", @@ -81615,11 +75770,11 @@ const BlobSetMetadataExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobAcquireLeaseHeaders = { serializedName: "Blob_acquireLeaseHeaders", @@ -81631,53 +75786,53 @@ const BlobAcquireLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", 
type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobAcquireLeaseExceptionHeaders = { serializedName: "Blob_acquireLeaseExceptionHeaders", @@ -81689,11 +75844,11 @@ const BlobAcquireLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobReleaseLeaseHeaders = { serializedName: "Blob_releaseLeaseHeaders", @@ -81705,46 +75860,46 @@ const BlobReleaseLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobReleaseLeaseExceptionHeaders = { serializedName: "Blob_releaseLeaseExceptionHeaders", @@ -81756,11 +75911,11 @@ const BlobReleaseLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobRenewLeaseHeaders = { serializedName: "Blob_renewLeaseHeaders", @@ -81772,53 +75927,53 @@ const BlobRenewLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobRenewLeaseExceptionHeaders = { serializedName: "Blob_renewLeaseExceptionHeaders", @@ -81830,11 +75985,11 @@ const BlobRenewLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobChangeLeaseHeaders = { serializedName: "Blob_changeLeaseHeaders", @@ -81846,53 +76001,53 @@ const BlobChangeLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: 
{ - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobChangeLeaseExceptionHeaders = { serializedName: "Blob_changeLeaseExceptionHeaders", @@ -81904,11 +76059,11 @@ const BlobChangeLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobBreakLeaseHeaders = { serializedName: "Blob_breakLeaseHeaders", @@ -81920,53 +76075,53 @@ const BlobBreakLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseTime: { serializedName: "x-ms-lease-time", xmlName: "x-ms-lease-time", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobBreakLeaseExceptionHeaders = { serializedName: "Blob_breakLeaseExceptionHeaders", @@ -81978,11 +76133,11 @@ const BlobBreakLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobCreateSnapshotHeaders = { serializedName: "Blob_createSnapshotHeaders", @@ -81994,74 +76149,74 @@ const BlobCreateSnapshotHeaders = { serializedName: "x-ms-snapshot", xmlName: "x-ms-snapshot", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + 
name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobCreateSnapshotExceptionHeaders = { serializedName: "Blob_createSnapshotExceptionHeaders", @@ -82073,11 +76228,11 @@ const BlobCreateSnapshotExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobStartCopyFromURLHeaders = { serializedName: "Blob_startCopyFromURLHeaders", @@ -82089,75 +76244,75 @@ const BlobStartCopyFromURLHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobStartCopyFromURLExceptionHeaders = { serializedName: "Blob_startCopyFromURLExceptionHeaders", @@ -82169,11 +76324,11 @@ const BlobStartCopyFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobCopyFromURLHeaders = { serializedName: "Blob_copyFromURLHeaders", @@ -82185,96 +76340,96 @@ const BlobCopyFromURLHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: 
"DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { defaultValue: "success", isConstant: true, serializedName: "x-ms-copy-status", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobCopyFromURLExceptionHeaders = { serializedName: "Blob_copyFromURLExceptionHeaders", @@ -82286,11 +76441,11 @@ const BlobCopyFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobAbortCopyFromURLHeaders = { serializedName: "Blob_abortCopyFromURLHeaders", @@ -82302,39 +76457,39 @@ const BlobAbortCopyFromURLHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobAbortCopyFromURLExceptionHeaders = { serializedName: "Blob_abortCopyFromURLExceptionHeaders", @@ -82346,11 +76501,11 @@ const BlobAbortCopyFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetTierHeaders = { serializedName: "Blob_setTierHeaders", @@ -82362,32 +76517,32 @@ const BlobSetTierHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetTierExceptionHeaders = { serializedName: "Blob_setTierExceptionHeaders", @@ -82399,11 +76554,11 @@ const BlobSetTierExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetAccountInfoHeaders = { serializedName: "Blob_getAccountInfoHeaders", @@ -82415,29 +76570,29 @@ const BlobGetAccountInfoHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - 
name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, skuName: { serializedName: "x-ms-sku-name", @@ -82449,9 +76604,9 @@ const BlobGetAccountInfoHeaders = { "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", - "Premium_LRS" - ] - } + "Premium_LRS", + ], + }, }, accountKind: { serializedName: "x-ms-account-kind", @@ -82463,12 +76618,19 @@ const BlobGetAccountInfoHeaders = { "BlobStorage", "StorageV2", "FileStorage", - "BlockBlobStorage" - ] - } - } - } - } + "BlockBlobStorage", + ], + }, + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean", + }, + }, + }, + }, }; const BlobGetAccountInfoExceptionHeaders = { serializedName: "Blob_getAccountInfoExceptionHeaders", @@ -82480,11 +76642,11 @@ const BlobGetAccountInfoExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobQueryHeaders = { serializedName: "Blob_queryHeaders", @@ -82496,145 +76658,146 @@ const BlobQueryHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, metadata: { serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", - value: { type: { name: "String" } } - } + value: { type: { name: "String" } }, + }, }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { - name: "Number" - } + name: "Number", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, contentRange: { serializedName: "content-range", xmlName: "content-range", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { - name: "String" - } + name: "String", + }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { - name: "String" - } + name: "String", + }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { - name: "String" - } + name: "String", + }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { - name: "String" - } + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, copyCompletionTime: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, 
copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { - name: "String" - } + name: "String", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { - name: "String" - } + name: "String", + }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, leaseState: { serializedName: "x-ms-lease-state", @@ -82646,104 +76809,104 @@ const BlobQueryHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, blobContentMD5: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, contentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } - } - } - } + name: "ByteArray", + }, + }, + }, + }, }; const BlobQueryExceptionHeaders = { serializedName: "Blob_queryExceptionHeaders", @@ -82755,11 +76918,11 @@ const BlobQueryExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetTagsHeaders = { serializedName: "Blob_getTagsHeaders", @@ -82771,39 +76934,39 @@ const BlobGetTagsHeaders = { 
serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetTagsExceptionHeaders = { serializedName: "Blob_getTagsExceptionHeaders", @@ -82815,11 +76978,11 @@ const BlobGetTagsExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetTagsHeaders = { serializedName: "Blob_setTagsHeaders", @@ -82831,39 +76994,39 @@ const BlobSetTagsHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetTagsExceptionHeaders = { serializedName: "Blob_setTagsExceptionHeaders", @@ -82875,11 +77038,11 @@ const BlobSetTagsExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobCreateHeaders = { serializedName: "PageBlob_createHeaders", @@ -82891,88 +77054,88 @@ const PageBlobCreateHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, 
[Condensed for readability: the hunks in this region belong to the regenerated, vendored Azure Blob Storage client bundle — its auto-generated serializer mappers (the PageBlob*, AppendBlob*, and BlockBlob* request/exception header mappers), the frozen `Mappers` export, and the generated operation-parameter constants (`contentType`, `comp*`, `restype*`, lease and conditional-request headers, etc.). The changes are almost entirely mechanical: trailing commas are added throughout the generated objects, the `Mappers` export is re-ordered alphabetically, and `headerCollectionPrefix: "x-ms-meta-"` is repositioned inside the `metadata` parameter mapper. The substantive differences in this region are the default storage service version bump and the replacement of the `coreHttp.QueryCollectionFormat.Csv` enum reference with the string literal `"CSV"` in the `include`/`include1` list parameters, shown below.]

 const version = {
     parameterPath: "version",
     mapper: {
-        defaultValue: "2023-11-03",
+        defaultValue: "2024-11-04",
         isConstant: true,
         serializedName: "x-ms-version",
         type: {
-            name: "String"
-        }
-    }
+            name: "String",
+        },
+    },
 };

-    collectionFormat: coreHttp.QueryCollectionFormat.Csv
+    collectionFormat: "CSV",
}, + }, }; const ifTags = { parameterPath: ["options", "modifiedAccessConditions", "ifTags"], @@ -85676,9 +79839,9 @@ const ifTags = { serializedName: "x-ms-if-tags", xmlName: "x-ms-if-tags", type: { - name: "String" - } - } + name: "String", + }, + }, }; const deleteSnapshots = { parameterPath: ["options", "deleteSnapshots"], @@ -85687,9 +79850,9 @@ const deleteSnapshots = { xmlName: "x-ms-delete-snapshots", type: { name: "Enum", - allowedValues: ["include", "only"] - } - } + allowedValues: ["include", "only"], + }, + }, }; const blobDeleteType = { parameterPath: ["options", "blobDeleteType"], @@ -85697,9 +79860,9 @@ const blobDeleteType = { serializedName: "deletetype", xmlName: "deletetype", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp11 = { parameterPath: "comp", @@ -85708,9 +79871,9 @@ const comp11 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const expiryOptions = { parameterPath: "expiryOptions", @@ -85719,9 +79882,9 @@ const expiryOptions = { required: true, xmlName: "x-ms-expiry-option", type: { - name: "String" - } - } + name: "String", + }, + }, }; const expiresOn = { parameterPath: ["options", "expiresOn"], @@ -85729,9 +79892,9 @@ const expiresOn = { serializedName: "x-ms-expiry-time", xmlName: "x-ms-expiry-time", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobCacheControl = { parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], @@ -85739,9 +79902,9 @@ const blobCacheControl = { serializedName: "x-ms-blob-cache-control", xmlName: "x-ms-blob-cache-control", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentType = { parameterPath: ["options", "blobHttpHeaders", "blobContentType"], @@ -85749,9 +79912,9 @@ const blobContentType = { serializedName: "x-ms-blob-content-type", xmlName: "x-ms-blob-content-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentMD5 = { parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], @@ -85759,9 +79922,9 @@ const blobContentMD5 = { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; const blobContentEncoding = { parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], @@ -85769,9 +79932,9 @@ const blobContentEncoding = { serializedName: "x-ms-blob-content-encoding", xmlName: "x-ms-blob-content-encoding", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentLanguage = { parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], @@ -85779,9 +79942,9 @@ const blobContentLanguage = { serializedName: "x-ms-blob-content-language", xmlName: "x-ms-blob-content-language", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentDisposition = { parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], @@ -85789,9 +79952,9 @@ const blobContentDisposition = { serializedName: "x-ms-blob-content-disposition", xmlName: "x-ms-blob-content-disposition", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp12 = { parameterPath: "comp", @@ -85800,9 +79963,9 @@ const comp12 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const immutabilityPolicyExpiry = { parameterPath: ["options", "immutabilityPolicyExpiry"], @@ -85810,9 +79973,9 @@ const immutabilityPolicyExpiry = { serializedName: 
"x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; const immutabilityPolicyMode = { parameterPath: ["options", "immutabilityPolicyMode"], @@ -85821,9 +79984,9 @@ const immutabilityPolicyMode = { xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, }; const comp13 = { parameterPath: "comp", @@ -85832,9 +79995,9 @@ const comp13 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const legalHold = { parameterPath: "legalHold", @@ -85843,9 +80006,9 @@ const legalHold = { required: true, xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const encryptionScope = { parameterPath: ["options", "encryptionScope"], @@ -85853,9 +80016,9 @@ const encryptionScope = { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp14 = { parameterPath: "comp", @@ -85864,9 +80027,9 @@ const comp14 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const tier = { parameterPath: ["options", "tier"], @@ -85890,10 +80053,10 @@ const tier = { "Hot", "Cool", "Archive", - "Cold" - ] - } - } + "Cold", + ], + }, + }, }; const rehydratePriority = { parameterPath: ["options", "rehydratePriority"], @@ -85902,37 +80065,37 @@ const rehydratePriority = { xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", - allowedValues: ["High", "Standard"] - } - } + allowedValues: ["High", "Standard"], + }, + }, }; const sourceIfModifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", - "sourceIfModifiedSince" + "sourceIfModifiedSince", ], mapper: { serializedName: "x-ms-source-if-modified-since", xmlName: "x-ms-source-if-modified-since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; const sourceIfUnmodifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", - "sourceIfUnmodifiedSince" + "sourceIfUnmodifiedSince", ], mapper: { serializedName: "x-ms-source-if-unmodified-since", xmlName: "x-ms-source-if-unmodified-since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; const sourceIfMatch = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], @@ -85940,23 +80103,23 @@ const sourceIfMatch = { serializedName: "x-ms-source-if-match", xmlName: "x-ms-source-if-match", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceIfNoneMatch = { parameterPath: [ "options", "sourceModifiedAccessConditions", - "sourceIfNoneMatch" + "sourceIfNoneMatch", ], mapper: { serializedName: "x-ms-source-if-none-match", xmlName: "x-ms-source-if-none-match", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceIfTags = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], @@ -85964,9 +80127,9 @@ const sourceIfTags = { serializedName: "x-ms-source-if-tags", xmlName: "x-ms-source-if-tags", type: { - name: "String" - } - } + name: "String", + }, + }, }; const copySource = { parameterPath: "copySource", @@ -85975,9 +80138,9 @@ const copySource = { required: true, xmlName: "x-ms-copy-source", type: { - name: "String" - } - } + name: "String", + }, + }, }; const 
blobTagsString = { parameterPath: ["options", "blobTagsString"], @@ -85985,9 +80148,9 @@ const blobTagsString = { serializedName: "x-ms-tags", xmlName: "x-ms-tags", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sealBlob = { parameterPath: ["options", "sealBlob"], @@ -85995,9 +80158,9 @@ const sealBlob = { serializedName: "x-ms-seal-blob", xmlName: "x-ms-seal-blob", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const legalHold1 = { parameterPath: ["options", "legalHold"], @@ -86005,9 +80168,9 @@ const legalHold1 = { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const xMsRequiresSync = { parameterPath: "xMsRequiresSync", @@ -86016,9 +80179,9 @@ const xMsRequiresSync = { isConstant: true, serializedName: "x-ms-requires-sync", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceContentMD5 = { parameterPath: ["options", "sourceContentMD5"], @@ -86026,9 +80189,9 @@ const sourceContentMD5 = { serializedName: "x-ms-source-content-md5", xmlName: "x-ms-source-content-md5", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; const copySourceAuthorization = { parameterPath: ["options", "copySourceAuthorization"], @@ -86036,9 +80199,9 @@ const copySourceAuthorization = { serializedName: "x-ms-copy-source-authorization", xmlName: "x-ms-copy-source-authorization", type: { - name: "String" - } - } + name: "String", + }, + }, }; const copySourceTags = { parameterPath: ["options", "copySourceTags"], @@ -86047,9 +80210,9 @@ const copySourceTags = { xmlName: "x-ms-copy-source-tag-option", type: { name: "Enum", - allowedValues: ["REPLACE", "COPY"] - } - } + allowedValues: ["REPLACE", "COPY"], + }, + }, }; const comp15 = { parameterPath: "comp", @@ -86058,9 +80221,9 @@ const comp15 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const copyActionAbortConstant = { parameterPath: "copyActionAbortConstant", @@ -86069,9 +80232,9 @@ const copyActionAbortConstant = { isConstant: true, serializedName: "x-ms-copy-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; const copyId = { parameterPath: "copyId", @@ -86080,9 +80243,9 @@ const copyId = { required: true, xmlName: "copyid", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp16 = { parameterPath: "comp", @@ -86091,9 +80254,9 @@ const comp16 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const tier1 = { parameterPath: "tier", @@ -86118,14 +80281,14 @@ const tier1 = { "Hot", "Cool", "Archive", - "Cold" - ] - } - } + "Cold", + ], + }, + }, }; const queryRequest = { parameterPath: ["options", "queryRequest"], - mapper: QueryRequest + mapper: QueryRequest, }; const comp17 = { parameterPath: "comp", @@ -86134,9 +80297,9 @@ const comp17 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp18 = { parameterPath: "comp", @@ -86145,13 +80308,13 @@ const comp18 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const tags = { parameterPath: ["options", "tags"], - mapper: BlobTags + mapper: BlobTags, }; const transactionalContentMD5 = { parameterPath: ["options", "transactionalContentMD5"], @@ -86159,9 +80322,9 @@ const transactionalContentMD5 = { serializedName: "Content-MD5", xmlName: "Content-MD5", type: { - name: 
"ByteArray" - } - } + name: "ByteArray", + }, + }, }; const transactionalContentCrc64 = { parameterPath: ["options", "transactionalContentCrc64"], @@ -86169,9 +80332,9 @@ const transactionalContentCrc64 = { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; const blobType = { parameterPath: "blobType", @@ -86180,9 +80343,9 @@ const blobType = { isConstant: true, serializedName: "x-ms-blob-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentLength = { parameterPath: "blobContentLength", @@ -86191,19 +80354,20 @@ const blobContentLength = { required: true, xmlName: "x-ms-blob-content-length", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const blobSequenceNumber = { parameterPath: ["options", "blobSequenceNumber"], mapper: { + defaultValue: 0, serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const contentType1 = { parameterPath: ["options", "contentType"], @@ -86212,9 +80376,9 @@ const contentType1 = { isConstant: true, serializedName: "Content-Type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const body1 = { parameterPath: "body", @@ -86223,9 +80387,9 @@ const body1 = { required: true, xmlName: "body", type: { - name: "Stream" - } - } + name: "Stream", + }, + }, }; const accept2 = { parameterPath: "accept", @@ -86234,9 +80398,9 @@ const accept2 = { isConstant: true, serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp19 = { parameterPath: "comp", @@ -86245,9 +80409,9 @@ const comp19 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const pageWrite = { parameterPath: "pageWrite", @@ -86256,51 +80420,51 @@ const pageWrite = { isConstant: true, serializedName: "x-ms-page-write", type: { - name: "String" - } - } + name: "String", + }, + }, }; const ifSequenceNumberLessThanOrEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", - "ifSequenceNumberLessThanOrEqualTo" + "ifSequenceNumberLessThanOrEqualTo", ], mapper: { serializedName: "x-ms-if-sequence-number-le", xmlName: "x-ms-if-sequence-number-le", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const ifSequenceNumberLessThan = { parameterPath: [ "options", "sequenceNumberAccessConditions", - "ifSequenceNumberLessThan" + "ifSequenceNumberLessThan", ], mapper: { serializedName: "x-ms-if-sequence-number-lt", xmlName: "x-ms-if-sequence-number-lt", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const ifSequenceNumberEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", - "ifSequenceNumberEqualTo" + "ifSequenceNumberEqualTo", ], mapper: { serializedName: "x-ms-if-sequence-number-eq", xmlName: "x-ms-if-sequence-number-eq", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const pageWrite1 = { parameterPath: "pageWrite", @@ -86309,9 +80473,9 @@ const pageWrite1 = { isConstant: true, serializedName: "x-ms-page-write", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceUrl = { parameterPath: "sourceUrl", @@ -86320,9 +80484,9 @@ const sourceUrl = { required: true, xmlName: "x-ms-copy-source", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceRange = { parameterPath: "sourceRange", @@ -86331,9 +80495,9 @@ const sourceRange = { required: true, xmlName: 
"x-ms-source-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceContentCrc64 = { parameterPath: ["options", "sourceContentCrc64"], @@ -86341,9 +80505,9 @@ const sourceContentCrc64 = { serializedName: "x-ms-source-content-crc64", xmlName: "x-ms-source-content-crc64", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; const range1 = { parameterPath: "range", @@ -86352,9 +80516,9 @@ const range1 = { required: true, xmlName: "x-ms-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp20 = { parameterPath: "comp", @@ -86363,9 +80527,9 @@ const comp20 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const prevsnapshot = { parameterPath: ["options", "prevsnapshot"], @@ -86373,9 +80537,9 @@ const prevsnapshot = { serializedName: "prevsnapshot", xmlName: "prevsnapshot", type: { - name: "String" - } - } + name: "String", + }, + }, }; const prevSnapshotUrl = { parameterPath: ["options", "prevSnapshotUrl"], @@ -86383,9 +80547,9 @@ const prevSnapshotUrl = { serializedName: "x-ms-previous-snapshot-url", xmlName: "x-ms-previous-snapshot-url", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sequenceNumberAction = { parameterPath: "sequenceNumberAction", @@ -86395,9 +80559,9 @@ const sequenceNumberAction = { xmlName: "x-ms-sequence-number-action", type: { name: "Enum", - allowedValues: ["max", "update", "increment"] - } - } + allowedValues: ["max", "update", "increment"], + }, + }, }; const comp21 = { parameterPath: "comp", @@ -86406,9 +80570,9 @@ const comp21 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobType1 = { parameterPath: "blobType", @@ -86417,9 +80581,9 @@ const blobType1 = { isConstant: true, serializedName: "x-ms-blob-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp22 = { parameterPath: "comp", @@ -86428,9 +80592,9 @@ const comp22 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const maxSize = { parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], @@ -86438,23 +80602,23 @@ const maxSize = { serializedName: "x-ms-blob-condition-maxsize", xmlName: "x-ms-blob-condition-maxsize", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const appendPosition = { parameterPath: [ "options", "appendPositionAccessConditions", - "appendPosition" + "appendPosition", ], mapper: { serializedName: "x-ms-blob-condition-appendpos", xmlName: "x-ms-blob-condition-appendpos", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const sourceRange1 = { parameterPath: ["options", "sourceRange"], @@ -86462,9 +80626,9 @@ const sourceRange1 = { serializedName: "x-ms-source-range", xmlName: "x-ms-source-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp23 = { parameterPath: "comp", @@ -86473,9 +80637,9 @@ const comp23 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobType2 = { parameterPath: "blobType", @@ -86484,9 +80648,9 @@ const blobType2 = { isConstant: true, serializedName: "x-ms-blob-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const copySourceBlobProperties = { parameterPath: ["options", "copySourceBlobProperties"], @@ -86494,9 +80658,9 @@ const copySourceBlobProperties = { serializedName: "x-ms-copy-source-blob-properties", 
xmlName: "x-ms-copy-source-blob-properties", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const comp24 = { parameterPath: "comp", @@ -86505,9 +80669,9 @@ const comp24 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blockId = { parameterPath: "blockId", @@ -86516,13 +80680,13 @@ const blockId = { required: true, xmlName: "blockid", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blocks = { parameterPath: "blocks", - mapper: BlockLookupList + mapper: BlockLookupList, }; const comp25 = { parameterPath: "comp", @@ -86531,9 +80695,9 @@ const comp25 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const listType = { parameterPath: "listType", @@ -86544,9 +80708,9 @@ const listType = { xmlName: "blocklisttype", type: { name: "Enum", - allowedValues: ["committed", "uncommitted", "all"] - } - } + allowedValues: ["committed", "uncommitted", "all"], + }, + }, }; /* @@ -86556,8 +80720,8 @@ const listType = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a Service. */ -class Service { +/** Class containing Service operations. */ +class ServiceImpl { /** * Initialize a new instance of the class Service class. * @param client Reference to the service client @@ -86572,11 +80736,7 @@ class Service { * @param options The options parameters. */ setProperties(blobServiceProperties, options) { - const operationArguments = { - blobServiceProperties, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); } /** * gets the properties of a storage account's Blob service, including properties for Storage Analytics @@ -86584,10 +80744,7 @@ class Service { * @param options The options parameters. */ getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2); } /** * Retrieves statistics related to replication for the Blob service. It is only available on the @@ -86596,20 +80753,14 @@ class Service { * @param options The options parameters. */ getStatistics(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); } /** * The List Containers Segment operation returns a list of the containers under the specified account * @param options The options parameters. */ listContainersSegment(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); } /** * Retrieves a user delegation key for the Blob service. 
This is only a valid operation when using @@ -86618,21 +80769,14 @@ class Service { * @param options The options parameters. */ getUserDelegationKey(keyInfo, options) { - const operationArguments = { - keyInfo, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. @@ -86643,13 +80787,7 @@ class Service { * @param options The options parameters. */ submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec$1); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a @@ -86658,43 +80796,40 @@ class Service { * @param options The options parameters. */ filterBlobs(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1); } } // Operation Specifications -const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$5 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", responses: { 202: { - headersMapper: ServiceSetPropertiesHeaders + headersMapper: ServiceSetPropertiesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceSetPropertiesExceptionHeaders - } + headersMapper: ServiceSetPropertiesExceptionHeaders, + }, }, requestBody: blobServiceProperties, queryParameters: [ restype, comp, - timeoutInSeconds + timeoutInSeconds, ], urlParameters: [url], headerParameters: [ contentType, accept, version, - requestId + requestId, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const getPropertiesOperationSpec$2 = { path: "/", @@ -86702,26 +80837,26 @@ const getPropertiesOperationSpec$2 = { responses: { 200: { bodyMapper: BlobServiceProperties, - headersMapper: ServiceGetPropertiesHeaders + headersMapper: ServiceGetPropertiesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetPropertiesExceptionHeaders - } + headersMapper: ServiceGetPropertiesExceptionHeaders, + }, }, queryParameters: [ restype, comp, - timeoutInSeconds + timeoutInSeconds, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: 
true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const getStatisticsOperationSpec = { path: "/", @@ -86729,26 +80864,26 @@ const getStatisticsOperationSpec = { responses: { 200: { bodyMapper: BlobServiceStatistics, - headersMapper: ServiceGetStatisticsHeaders + headersMapper: ServiceGetStatisticsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetStatisticsExceptionHeaders - } + headersMapper: ServiceGetStatisticsExceptionHeaders, + }, }, queryParameters: [ restype, timeoutInSeconds, - comp1 + comp1, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const listContainersSegmentOperationSpec = { path: "/", @@ -86756,12 +80891,12 @@ const listContainersSegmentOperationSpec = { responses: { 200: { bodyMapper: ListContainersSegmentResponse, - headersMapper: ServiceListContainersSegmentHeaders + headersMapper: ServiceListContainersSegmentHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceListContainersSegmentExceptionHeaders - } + headersMapper: ServiceListContainersSegmentExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, @@ -86769,16 +80904,16 @@ const listContainersSegmentOperationSpec = { prefix, marker, maxPageSize, - include + include, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const getUserDelegationKeyOperationSpec = { path: "/", @@ -86786,48 +80921,56 @@ const getUserDelegationKeyOperationSpec = { responses: { 200: { bodyMapper: UserDelegationKey, - headersMapper: ServiceGetUserDelegationKeyHeaders + headersMapper: ServiceGetUserDelegationKeyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetUserDelegationKeyExceptionHeaders - } + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders, + }, }, requestBody: keyInfo, queryParameters: [ restype, timeoutInSeconds, - comp3 + comp3, ], urlParameters: [url], headerParameters: [ contentType, accept, version, - requestId + requestId, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const getAccountInfoOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { 200: { - headersMapper: ServiceGetAccountInfoHeaders + headersMapper: ServiceGetAccountInfoHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetAccountInfoExceptionHeaders - } + headersMapper: ServiceGetAccountInfoExceptionHeaders, + }, }, - queryParameters: [comp, restype1], + queryParameters: [ + comp, + timeoutInSeconds, + restype1, + ], urlParameters: [url], - headerParameters: [version, accept1], + headerParameters: [ + version, + requestId, + accept1, + ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const submitBatchOperationSpec$1 = { path: "/", @@ -86836,30 +80979,29 @@ const submitBatchOperationSpec$1 = { 202: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: ServiceSubmitBatchHeaders + headersMapper: ServiceSubmitBatchHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceSubmitBatchExceptionHeaders - } + headersMapper: ServiceSubmitBatchExceptionHeaders, + }, }, requestBody: body, queryParameters: [timeoutInSeconds, comp4], urlParameters: [url], headerParameters: [ - contentType, accept, 
version, requestId, contentLength, - multipartContentType + multipartContentType, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const filterBlobsOperationSpec$1 = { path: "/", @@ -86867,28 +81009,28 @@ const filterBlobsOperationSpec$1 = { responses: { 200: { bodyMapper: FilterBlobSegment, - headersMapper: ServiceFilterBlobsHeaders + headersMapper: ServiceFilterBlobsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceFilterBlobsExceptionHeaders - } + headersMapper: ServiceFilterBlobsExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, comp5, - where + where, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; /* @@ -86898,8 +81040,8 @@ const filterBlobsOperationSpec$1 = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a Container. */ -class Container { +/** Class containing Container operations. */ +class ContainerImpl { /** * Initialize a new instance of the class Container class. * @param client Reference to the service client @@ -86913,10 +81055,7 @@ class Container { * @param options The options parameters. */ create(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + return this.client.sendOperationRequest({ options }, createOperationSpec$2); } /** * returns all user-defined metadata and system properties for the specified container. The data @@ -86924,10 +81063,7 @@ class Container { * @param options The options parameters. */ getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1); } /** * operation marks the specified container for deletion. The container and any blobs contained within @@ -86935,20 +81071,14 @@ class Container { * @param options The options parameters. */ delete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + return this.client.sendOperationRequest({ options }, deleteOperationSpec$1); } /** * operation sets one or more user-defined name-value pairs for the specified container. * @param options The options parameters. */ setMetadata(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1); } /** * gets the permissions for the specified container. The permissions indicate whether container data @@ -86956,10 +81086,7 @@ class Container { * @param options The options parameters. 
*/ getAccessPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); } /** * sets the permissions for the specified container. The permissions indicate whether blobs in a @@ -86967,20 +81094,14 @@ class Container { * @param options The options parameters. */ setAccessPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); } /** * Restores a previously-deleted container. * @param options The options parameters. */ restore(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + return this.client.sendOperationRequest({ options }, restoreOperationSpec); } /** * Renames an existing container. @@ -86988,11 +81109,7 @@ class Container { * @param options The options parameters. */ rename(sourceContainerName, options) { - const operationArguments = { - sourceContainerName, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. @@ -87003,13 +81120,7 @@ class Container { * @param options The options parameters. */ submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); } /** * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given @@ -87017,10 +81128,7 @@ class Container { * @param options The options parameters. */ filterBlobs(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -87028,10 +81136,7 @@ class Container { * @param options The options parameters. */ acquireLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can @@ -87040,11 +81145,7 @@ class Container { * @param options The options parameters. */ releaseLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -87053,11 +81154,7 @@ class Container { * @param options The options parameters. */ renewLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -87065,10 +81162,7 @@ class Container { * @param options The options parameters. */ breakLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -87080,22 +81174,14 @@ class Container { * @param options The options parameters. */ changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec$1); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param options The options parameters. */ listBlobFlatSegment(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container @@ -87106,36 +81192,29 @@ class Container { * @param options The options parameters. */ listBlobHierarchySegment(delimiter, options) { - const operationArguments = { - delimiter, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. 
*/ getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1); } } // Operation Specifications -const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$4 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const createOperationSpec$2 = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { - headersMapper: ContainerCreateHeaders + headersMapper: ContainerCreateHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerCreateExceptionHeaders - } + headersMapper: ContainerCreateExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], @@ -87146,22 +81225,22 @@ const createOperationSpec$2 = { metadata, access, defaultEncryptionScope, - preventEncryptionScopeOverride + preventEncryptionScopeOverride, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const getPropertiesOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { - headersMapper: ContainerGetPropertiesHeaders + headersMapper: ContainerGetPropertiesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerGetPropertiesExceptionHeaders - } + headersMapper: ContainerGetPropertiesExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], @@ -87169,22 +81248,22 @@ const getPropertiesOperationSpec$1 = { version, requestId, accept1, - leaseId + leaseId, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const deleteOperationSpec$1 = { path: "/{containerName}", httpMethod: "DELETE", responses: { 202: { - headersMapper: ContainerDeleteHeaders + headersMapper: ContainerDeleteHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerDeleteExceptionHeaders - } + headersMapper: ContainerDeleteExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], @@ -87194,27 +81273,27 @@ const deleteOperationSpec$1 = { accept1, leaseId, ifModifiedSince, - ifUnmodifiedSince + ifUnmodifiedSince, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const setMetadataOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerSetMetadataHeaders + headersMapper: ContainerSetMetadataHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerSetMetadataExceptionHeaders - } + headersMapper: ContainerSetMetadataExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp6 + comp6, ], urlParameters: [url], headerParameters: [ @@ -87223,10 +81302,10 @@ const setMetadataOperationSpec$1 = { accept1, metadata, leaseId, - ifModifiedSince + ifModifiedSince, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const getAccessPolicyOperationSpec = { path: "/{containerName}", @@ -87237,53 +81316,53 @@ const getAccessPolicyOperationSpec = { type: { name: "Sequence", element: { - type: { name: "Composite", className: "SignedIdentifier" } - } + type: { name: "Composite", className: "SignedIdentifier" }, + }, }, serializedName: "SignedIdentifiers", xmlName: "SignedIdentifiers", xmlIsWrapped: true, - xmlElementName: "SignedIdentifier" + xmlElementName: 
"SignedIdentifier", }, - headersMapper: ContainerGetAccessPolicyHeaders + headersMapper: ContainerGetAccessPolicyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerGetAccessPolicyExceptionHeaders - } + headersMapper: ContainerGetAccessPolicyExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp7 + comp7, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, - leaseId + leaseId, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const setAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerSetAccessPolicyHeaders + headersMapper: ContainerSetAccessPolicyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerSetAccessPolicyExceptionHeaders - } + headersMapper: ContainerSetAccessPolicyExceptionHeaders, + }, }, requestBody: containerAcl, queryParameters: [ timeoutInSeconds, restype2, - comp7 + comp7, ], urlParameters: [url], headerParameters: [ @@ -87294,29 +81373,29 @@ const setAccessPolicyOperationSpec = { access, leaseId, ifModifiedSince, - ifUnmodifiedSince + ifUnmodifiedSince, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const restoreOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { - headersMapper: ContainerRestoreHeaders + headersMapper: ContainerRestoreHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerRestoreExceptionHeaders - } + headersMapper: ContainerRestoreExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp8 + comp8, ], urlParameters: [url], headerParameters: [ @@ -87324,27 +81403,27 @@ const restoreOperationSpec = { requestId, accept1, deletedContainerName, - deletedContainerVersion + deletedContainerVersion, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const renameOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerRenameHeaders + headersMapper: ContainerRenameHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerRenameExceptionHeaders - } + headersMapper: ContainerRenameExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp9 + comp9, ], urlParameters: [url], headerParameters: [ @@ -87352,10 +81431,10 @@ const renameOperationSpec = { requestId, accept1, sourceContainerName, - sourceLeaseId + sourceLeaseId, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const submitBatchOperationSpec = { path: "/{containerName}", @@ -87364,34 +81443,33 @@ const submitBatchOperationSpec = { 202: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: ContainerSubmitBatchHeaders + headersMapper: ContainerSubmitBatchHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerSubmitBatchExceptionHeaders - } + headersMapper: ContainerSubmitBatchExceptionHeaders, + }, }, requestBody: body, queryParameters: [ timeoutInSeconds, comp4, - restype2 + restype2, ], urlParameters: [url], headerParameters: [ - contentType, accept, version, requestId, contentLength, - multipartContentType + multipartContentType, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const filterBlobsOperationSpec = { 
path: "/{containerName}", @@ -87399,12 +81477,12 @@ const filterBlobsOperationSpec = { responses: { 200: { bodyMapper: FilterBlobSegment, - headersMapper: ContainerFilterBlobsHeaders + headersMapper: ContainerFilterBlobsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerFilterBlobsExceptionHeaders - } + headersMapper: ContainerFilterBlobsExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, @@ -87412,33 +81490,33 @@ const filterBlobsOperationSpec = { maxPageSize, comp5, where, - restype2 + restype2, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const acquireLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { - headersMapper: ContainerAcquireLeaseHeaders + headersMapper: ContainerAcquireLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerAcquireLeaseExceptionHeaders - } + headersMapper: ContainerAcquireLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -87449,27 +81527,27 @@ const acquireLeaseOperationSpec$1 = { ifUnmodifiedSince, action, duration, - proposedLeaseId + proposedLeaseId, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const releaseLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerReleaseLeaseHeaders + headersMapper: ContainerReleaseLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerReleaseLeaseExceptionHeaders - } + headersMapper: ContainerReleaseLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -87479,27 +81557,27 @@ const releaseLeaseOperationSpec$1 = { ifModifiedSince, ifUnmodifiedSince, action1, - leaseId1 + leaseId1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const renewLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerRenewLeaseHeaders + headersMapper: ContainerRenewLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerRenewLeaseExceptionHeaders - } + headersMapper: ContainerRenewLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -87509,27 +81587,27 @@ const renewLeaseOperationSpec$1 = { ifModifiedSince, ifUnmodifiedSince, leaseId1, - action2 + action2, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const breakLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 202: { - headersMapper: ContainerBreakLeaseHeaders + headersMapper: ContainerBreakLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerBreakLeaseExceptionHeaders - } + headersMapper: ContainerBreakLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -87539,27 +81617,27 @@ const breakLeaseOperationSpec$1 = { ifModifiedSince, ifUnmodifiedSince, action3, - breakPeriod + breakPeriod, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const changeLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: 
ContainerChangeLeaseHeaders + headersMapper: ContainerChangeLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerChangeLeaseExceptionHeaders - } + headersMapper: ContainerChangeLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -87570,10 +81648,10 @@ const changeLeaseOperationSpec$1 = { ifUnmodifiedSince, leaseId1, action4, - proposedLeaseId1 + proposedLeaseId1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const listBlobFlatSegmentOperationSpec = { path: "/{containerName}", @@ -87581,12 +81659,12 @@ const listBlobFlatSegmentOperationSpec = { responses: { 200: { bodyMapper: ListBlobsFlatSegmentResponse, - headersMapper: ContainerListBlobFlatSegmentHeaders + headersMapper: ContainerListBlobFlatSegmentHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerListBlobFlatSegmentExceptionHeaders - } + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, @@ -87595,16 +81673,16 @@ const listBlobFlatSegmentOperationSpec = { marker, maxPageSize, restype2, - include1 + include1, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", @@ -87612,12 +81690,12 @@ const listBlobHierarchySegmentOperationSpec = { responses: { 200: { bodyMapper: ListBlobsHierarchySegmentResponse, - headersMapper: ContainerListBlobHierarchySegmentHeaders + headersMapper: ContainerListBlobHierarchySegmentHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders - } + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, @@ -87627,34 +81705,42 @@ const listBlobHierarchySegmentOperationSpec = { maxPageSize, restype2, include1, - delimiter + delimiter, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const getAccountInfoOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { - headersMapper: ContainerGetAccountInfoHeaders + headersMapper: ContainerGetAccountInfoHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerGetAccountInfoExceptionHeaders - } + headersMapper: ContainerGetAccountInfoExceptionHeaders, + }, }, - queryParameters: [comp, restype1], + queryParameters: [ + comp, + timeoutInSeconds, + restype1, + ], urlParameters: [url], - headerParameters: [version, accept1], + headerParameters: [ + version, + requestId, + accept1, + ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; /* @@ -87664,8 +81750,8 @@ const getAccountInfoOperationSpec$1 = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a Blob. */ -class Blob$1 { +/** Class containing Blob operations. */ +class BlobImpl { /** * Initialize a new instance of the class Blob class. * @param client Reference to the service client @@ -87679,10 +81765,7 @@ class Blob$1 { * @param options The options parameters. 
*/ download(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + return this.client.sendOperationRequest({ options }, downloadOperationSpec); } /** * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system @@ -87690,10 +81773,7 @@ class Blob$1 { * @param options The options parameters. */ getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is @@ -87711,20 +81791,14 @@ class Blob$1 { * @param options The options parameters. */ delete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + return this.client.sendOperationRequest({ options }, deleteOperationSpec); } /** * Undelete a blob that was previously soft deleted * @param options The options parameters. */ undelete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + return this.client.sendOperationRequest({ options }, undeleteOperationSpec); } /** * Sets the time a blob will expire and be deleted. @@ -87732,41 +81806,28 @@ class Blob$1 { * @param options The options parameters. */ setExpiry(expiryOptions, options) { - const operationArguments = { - expiryOptions, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); } /** * The Set HTTP Headers operation sets system properties on the blob * @param options The options parameters. */ setHttpHeaders(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); } /** * The Set Immutability Policy operation sets the immutability policy on the blob * @param options The options parameters. */ setImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); } /** * The Delete Immutability Policy operation deletes the immutability policy on the blob * @param options The options parameters. 
*/ deleteImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); } /** * The Set Legal Hold operation sets a legal hold on the blob. @@ -87774,11 +81835,7 @@ class Blob$1 { * @param options The options parameters. */ setLegalHold(legalHold, options) { - const operationArguments = { - legalHold, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); } /** * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more @@ -87786,10 +81843,7 @@ class Blob$1 { * @param options The options parameters. */ setMetadata(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -87797,10 +81851,7 @@ class Blob$1 { * @param options The options parameters. */ acquireLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -87809,11 +81860,7 @@ class Blob$1 { * @param options The options parameters. */ releaseLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -87822,11 +81869,7 @@ class Blob$1 { * @param options The options parameters. */ renewLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -87838,12 +81881,7 @@ class Blob$1 { * @param options The options parameters. 
*/ changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -87851,20 +81889,14 @@ class Blob$1 { * @param options The options parameters. */ breakLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } /** * The Create Snapshot operation creates a read-only snapshot of a blob * @param options The options parameters. */ createSnapshot(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); } /** * The Start Copy From URL operation copies a blob or an internet resource to a new blob. @@ -87875,11 +81907,7 @@ class Blob$1 { * @param options The options parameters. */ startCopyFromURL(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); } /** * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return @@ -87891,11 +81919,7 @@ class Blob$1 { * @param options The options parameters. */ copyFromURL(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); } /** * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination @@ -87905,11 +81929,7 @@ class Blob$1 { * @param options The options parameters. */ abortCopyFromURL(copyId, options) { - const operationArguments = { - copyId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); } /** * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium @@ -87921,21 +81941,14 @@ class Blob$1 { * @param options The options parameters. 
*/ setTier(tier, options) { - const operationArguments = { - tier, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } /** * The Query operation enables users to select/project on blob data by providing simple query @@ -87943,34 +81956,25 @@ class Blob$1 { * @param options The options parameters. */ query(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + return this.client.sendOperationRequest({ options }, queryOperationSpec); } /** * The Get Tags operation enables users to get the tags associated with a blob. * @param options The options parameters. */ getTags(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + return this.client.sendOperationRequest({ options }, getTagsOperationSpec); } /** * The Set Tags operation enables users to set tags on a blob. * @param options The options parameters. */ setTags(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + return this.client.sendOperationRequest({ options }, setTagsOperationSpec); } } // Operation Specifications -const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$3 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", @@ -87978,26 +81982,26 @@ const downloadOperationSpec = { 200: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: BlobDownloadHeaders + headersMapper: BlobDownloadHeaders, }, 206: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: BlobDownloadHeaders + headersMapper: BlobDownloadHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobDownloadExceptionHeaders - } + headersMapper: BlobDownloadExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, - versionId + versionId, ], urlParameters: [url], headerParameters: [ @@ -88015,27 +82019,27 @@ const downloadOperationSpec = { encryptionAlgorithm, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const getPropertiesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { 200: { - headersMapper: BlobGetPropertiesHeaders + headersMapper: BlobGetPropertiesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobGetPropertiesExceptionHeaders 
- } + headersMapper: BlobGetPropertiesExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, - versionId + versionId, ], urlParameters: [url], headerParameters: [ @@ -88050,28 +82054,28 @@ const getPropertiesOperationSpec = { encryptionAlgorithm, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const deleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 202: { - headersMapper: BlobDeleteHeaders + headersMapper: BlobDeleteHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobDeleteExceptionHeaders - } + headersMapper: BlobDeleteExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, - blobDeleteType + blobDeleteType, ], urlParameters: [url], headerParameters: [ @@ -88084,44 +82088,44 @@ const deleteOperationSpec = { ifMatch, ifNoneMatch, ifTags, - deleteSnapshots + deleteSnapshots, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const undeleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobUndeleteHeaders + headersMapper: BlobUndeleteHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobUndeleteExceptionHeaders - } + headersMapper: BlobUndeleteExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp8], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setExpiryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetExpiryHeaders + headersMapper: BlobSetExpiryHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetExpiryExceptionHeaders - } + headersMapper: BlobSetExpiryExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp11], urlParameters: [url], @@ -88130,22 +82134,22 @@ const setExpiryOperationSpec = { requestId, accept1, expiryOptions, - expiresOn + expiresOn, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetHttpHeadersHeaders + headersMapper: BlobSetHttpHeadersHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetHttpHeadersExceptionHeaders - } + headersMapper: BlobSetHttpHeadersExceptionHeaders, + }, }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], @@ -88164,22 +82168,22 @@ const setHttpHeadersOperationSpec = { blobContentMD5, blobContentEncoding, blobContentLanguage, - blobContentDisposition + blobContentDisposition, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetImmutabilityPolicyHeaders + headersMapper: BlobSetImmutabilityPolicyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetImmutabilityPolicyExceptionHeaders - } + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp12], urlParameters: [url], @@ -88189,44 +82193,44 @@ const setImmutabilityPolicyOperationSpec = { accept1, ifUnmodifiedSince, immutabilityPolicyExpiry, - immutabilityPolicyMode + immutabilityPolicyMode, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; 
const deleteImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 200: { - headersMapper: BlobDeleteImmutabilityPolicyHeaders + headersMapper: BlobDeleteImmutabilityPolicyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders - } + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp12], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setLegalHoldOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetLegalHoldHeaders + headersMapper: BlobSetLegalHoldHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetLegalHoldExceptionHeaders - } + headersMapper: BlobSetLegalHoldExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp13], urlParameters: [url], @@ -88234,22 +82238,22 @@ const setLegalHoldOperationSpec = { version, requestId, accept1, - legalHold + legalHold, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setMetadataOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetMetadataHeaders + headersMapper: BlobSetMetadataHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetMetadataExceptionHeaders - } + headersMapper: BlobSetMetadataExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp6], urlParameters: [url], @@ -88267,22 +82271,22 @@ const setMetadataOperationSpec = { ifMatch, ifNoneMatch, ifTags, - encryptionScope + encryptionScope, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const acquireLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlobAcquireLeaseHeaders + headersMapper: BlobAcquireLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobAcquireLeaseExceptionHeaders - } + headersMapper: BlobAcquireLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -88297,22 +82301,22 @@ const acquireLeaseOperationSpec = { proposedLeaseId, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const releaseLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobReleaseLeaseHeaders + headersMapper: BlobReleaseLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobReleaseLeaseExceptionHeaders - } + headersMapper: BlobReleaseLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -88326,22 +82330,22 @@ const releaseLeaseOperationSpec = { leaseId1, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const renewLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobRenewLeaseHeaders + headersMapper: BlobRenewLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobRenewLeaseExceptionHeaders - } + headersMapper: BlobRenewLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -88355,22 +82359,22 @@ const renewLeaseOperationSpec = { action2, ifMatch, ifNoneMatch, - ifTags + 
ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const changeLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobChangeLeaseHeaders + headersMapper: BlobChangeLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobChangeLeaseExceptionHeaders - } + headersMapper: BlobChangeLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -88385,22 +82389,22 @@ const changeLeaseOperationSpec = { proposedLeaseId1, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const breakLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: BlobBreakLeaseHeaders + headersMapper: BlobBreakLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobBreakLeaseExceptionHeaders - } + headersMapper: BlobBreakLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -88414,22 +82418,22 @@ const breakLeaseOperationSpec = { breakPeriod, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const createSnapshotOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlobCreateSnapshotHeaders + headersMapper: BlobCreateSnapshotHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobCreateSnapshotExceptionHeaders - } + headersMapper: BlobCreateSnapshotExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp14], urlParameters: [url], @@ -88447,22 +82451,22 @@ const createSnapshotOperationSpec = { ifMatch, ifNoneMatch, ifTags, - encryptionScope + encryptionScope, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: BlobStartCopyFromURLHeaders + headersMapper: BlobStartCopyFromURLHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobStartCopyFromURLExceptionHeaders - } + headersMapper: BlobStartCopyFromURLExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], @@ -88489,22 +82493,22 @@ const startCopyFromURLOperationSpec = { copySource, blobTagsString, sealBlob, - legalHold1 + legalHold1, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const copyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: BlobCopyFromURLHeaders + headersMapper: BlobCopyFromURLHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobCopyFromURLExceptionHeaders - } + headersMapper: BlobCopyFromURLExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], @@ -88533,27 +82537,27 @@ const copyFromURLOperationSpec = { xMsRequiresSync, sourceContentMD5, copySourceAuthorization, - copySourceTags + copySourceTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { - headersMapper: BlobAbortCopyFromURLHeaders + headersMapper: BlobAbortCopyFromURLHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobAbortCopyFromURLExceptionHeaders - } + headersMapper: BlobAbortCopyFromURLExceptionHeaders, + }, }, 
queryParameters: [ timeoutInSeconds, comp15, - copyId + copyId, ], urlParameters: [url], headerParameters: [ @@ -88561,31 +82565,31 @@ const abortCopyFromURLOperationSpec = { requestId, accept1, leaseId, - copyActionAbortConstant + copyActionAbortConstant, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setTierOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetTierHeaders + headersMapper: BlobSetTierHeaders, }, 202: { - headersMapper: BlobSetTierHeaders + headersMapper: BlobSetTierHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetTierExceptionHeaders - } + headersMapper: BlobSetTierExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, - comp16 + comp16, ], urlParameters: [url], headerParameters: [ @@ -88595,28 +82599,36 @@ const setTierOperationSpec = { leaseId, ifTags, rehydratePriority, - tier1 + tier1, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const getAccountInfoOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { - headersMapper: BlobGetAccountInfoHeaders + headersMapper: BlobGetAccountInfoHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobGetAccountInfoExceptionHeaders - } + headersMapper: BlobGetAccountInfoExceptionHeaders, + }, }, - queryParameters: [comp, restype1], + queryParameters: [ + comp, + timeoutInSeconds, + restype1, + ], urlParameters: [url], - headerParameters: [version, accept1], + headerParameters: [ + version, + requestId, + accept1, + ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const queryOperationSpec = { path: "/{containerName}/{blob}", @@ -88625,27 +82637,27 @@ const queryOperationSpec = { 200: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: BlobQueryHeaders + headersMapper: BlobQueryHeaders, }, 206: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: BlobQueryHeaders + headersMapper: BlobQueryHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobQueryExceptionHeaders - } + headersMapper: BlobQueryExceptionHeaders, + }, }, requestBody: queryRequest, queryParameters: [ timeoutInSeconds, snapshot, - comp17 + comp17, ], urlParameters: [url], headerParameters: [ @@ -88661,12 +82673,12 @@ const queryOperationSpec = { encryptionAlgorithm, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const getTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -88674,18 +82686,18 @@ const getTagsOperationSpec = { responses: { 200: { bodyMapper: BlobTags, - headersMapper: BlobGetTagsHeaders + headersMapper: BlobGetTagsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobGetTagsExceptionHeaders - } + headersMapper: BlobGetTagsExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, - comp18 + comp18, ], urlParameters: [url], headerParameters: [ @@ -88693,28 +82705,28 @@ const getTagsOperationSpec = { requestId, accept1, leaseId, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { - headersMapper: 
BlobSetTagsHeaders + headersMapper: BlobSetTagsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetTagsExceptionHeaders - } + headersMapper: BlobSetTagsExceptionHeaders, + }, }, requestBody: tags, queryParameters: [ timeoutInSeconds, versionId, - comp18 + comp18, ], urlParameters: [url], headerParameters: [ @@ -88725,12 +82737,12 @@ const setTagsOperationSpec = { leaseId, ifTags, transactionalContentMD5, - transactionalContentCrc64 + transactionalContentCrc64, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; /* @@ -88740,8 +82752,8 @@ const setTagsOperationSpec = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a PageBlob. */ -class PageBlob { +/** Class containing PageBlob operations. */ +class PageBlobImpl { /** * Initialize a new instance of the class PageBlob class. * @param client Reference to the service client @@ -88757,12 +82769,7 @@ class PageBlob { * @param options The options parameters. */ create(contentLength, blobContentLength, options) { - const operationArguments = { - contentLength, - blobContentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); + return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec$1); } /** * The Upload Pages operation writes a range of pages to a page blob @@ -88771,12 +82778,7 @@ class PageBlob { * @param options The options parameters. */ uploadPages(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); } /** * The Clear Pages operation clears a set of pages from a page blob @@ -88784,11 +82786,7 @@ class PageBlob { * @param options The options parameters. */ clearPages(contentLength, options) { - const operationArguments = { - contentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); } /** * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a @@ -88802,14 +82800,7 @@ class PageBlob { * @param options The options parameters. */ uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { - const operationArguments = { - sourceUrl, - sourceRange, - contentLength, - range, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); } /** * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a @@ -88817,10 +82808,7 @@ class PageBlob { * @param options The options parameters. 
*/ getPageRanges(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); } /** * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were @@ -88828,10 +82816,7 @@ class PageBlob { * @param options The options parameters. */ getPageRangesDiff(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } /** * Resize the Blob @@ -88840,11 +82825,7 @@ class PageBlob { * @param options The options parameters. */ resize(blobContentLength, options) { - const operationArguments = { - blobContentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); } /** * Update the sequence number of the blob @@ -88854,11 +82835,7 @@ class PageBlob { * @param options The options parameters. */ updateSequenceNumber(sequenceNumberAction, options) { - const operationArguments = { - sequenceNumberAction, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); } /** * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. @@ -88873,27 +82850,22 @@ class PageBlob { * @param options The options parameters. 
*/ copyIncremental(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); } } // Operation Specifications -const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const xmlSerializer$2 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: PageBlobCreateHeaders + headersMapper: PageBlobCreateHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobCreateExceptionHeaders - } + headersMapper: PageBlobCreateExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], @@ -88926,22 +82898,22 @@ const createOperationSpec$1 = { legalHold1, blobType, blobContentLength, - blobSequenceNumber + blobSequenceNumber, ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$2, }; const uploadPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: PageBlobUploadPagesHeaders + headersMapper: PageBlobUploadPagesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesExceptionHeaders - } + headersMapper: PageBlobUploadPagesExceptionHeaders, + }, }, requestBody: body1, queryParameters: [timeoutInSeconds, comp19], @@ -88968,22 +82940,24 @@ const uploadPagesOperationSpec = { pageWrite, ifSequenceNumberLessThanOrEqualTo, ifSequenceNumberLessThan, - ifSequenceNumberEqualTo + ifSequenceNumberEqualTo, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer: serializer$2 + serializer: xmlSerializer$2, }; const clearPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: PageBlobClearPagesHeaders + headersMapper: PageBlobClearPagesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobClearPagesExceptionHeaders - } + headersMapper: PageBlobClearPagesExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], @@ -89006,22 +82980,22 @@ const clearPagesOperationSpec = { ifSequenceNumberLessThanOrEqualTo, ifSequenceNumberLessThan, ifSequenceNumberEqualTo, - pageWrite1 + pageWrite1, ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$2, }; const uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: PageBlobUploadPagesFromURLHeaders + headersMapper: PageBlobUploadPagesFromURLHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesFromURLExceptionHeaders - } + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], @@ -89053,10 +83027,10 @@ const uploadPagesFromURLOperationSpec = { sourceUrl, sourceRange, sourceContentCrc64, - range1 + range1, ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$2, }; const getPageRangesOperationSpec = { path: "/{containerName}/{blob}", @@ -89064,19 +83038,19 @@ const getPageRangesOperationSpec = { responses: { 200: { bodyMapper: PageList, - 
headersMapper: PageBlobGetPageRangesHeaders + headersMapper: PageBlobGetPageRangesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesExceptionHeaders - } + headersMapper: PageBlobGetPageRangesExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, snapshot, - comp20 + comp20, ], urlParameters: [url], headerParameters: [ @@ -89089,10 +83063,10 @@ const getPageRangesOperationSpec = { range, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$2, }; const getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", @@ -89100,12 +83074,12 @@ const getPageRangesDiffOperationSpec = { responses: { 200: { bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesDiffHeaders + headersMapper: PageBlobGetPageRangesDiffHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesDiffExceptionHeaders - } + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, @@ -89113,7 +83087,7 @@ const getPageRangesDiffOperationSpec = { maxPageSize, snapshot, comp20, - prevsnapshot + prevsnapshot, ], urlParameters: [url], headerParameters: [ @@ -89127,22 +83101,22 @@ const getPageRangesDiffOperationSpec = { ifMatch, ifNoneMatch, ifTags, - prevSnapshotUrl + prevSnapshotUrl, ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$2, }; const resizeOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: PageBlobResizeHeaders + headersMapper: PageBlobResizeHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobResizeExceptionHeaders - } + headersMapper: PageBlobResizeExceptionHeaders, + }, }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], @@ -89160,22 +83134,22 @@ const resizeOperationSpec = { ifNoneMatch, ifTags, encryptionScope, - blobContentLength + blobContentLength, ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$2, }; const updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: PageBlobUpdateSequenceNumberHeaders + headersMapper: PageBlobUpdateSequenceNumberHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders - } + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders, + }, }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], @@ -89190,22 +83164,22 @@ const updateSequenceNumberOperationSpec = { ifNoneMatch, ifTags, blobSequenceNumber, - sequenceNumberAction + sequenceNumberAction, ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$2, }; const copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: PageBlobCopyIncrementalHeaders + headersMapper: PageBlobCopyIncrementalHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobCopyIncrementalExceptionHeaders - } + headersMapper: PageBlobCopyIncrementalExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp21], urlParameters: [url], @@ -89218,10 +83192,10 @@ const copyIncrementalOperationSpec = { ifMatch, ifNoneMatch, ifTags, - copySource + copySource, ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$2, }; /* @@ -89231,8 +83205,8 @@ const copyIncrementalOperationSpec = { * Code generated by Microsoft (R) AutoRest Code Generator. 
* Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a AppendBlob. */ -class AppendBlob { +/** Class containing AppendBlob operations. */ +class AppendBlobImpl { /** * Initialize a new instance of the class AppendBlob class. * @param client Reference to the service client @@ -89246,11 +83220,7 @@ class AppendBlob { * @param options The options parameters. */ create(contentLength, options) { - const operationArguments = { - contentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); + return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob. The @@ -89261,12 +83231,7 @@ class AppendBlob { * @param options The options parameters. */ appendBlock(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob where @@ -89278,12 +83243,7 @@ class AppendBlob { * @param options The options parameters. */ appendBlockFromUrl(sourceUrl, contentLength, options) { - const operationArguments = { - sourceUrl, - contentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); } /** * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version @@ -89291,26 +83251,22 @@ class AppendBlob { * @param options The options parameters. 
*/ seal(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + return this.client.sendOperationRequest({ options }, sealOperationSpec); } } // Operation Specifications -const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const xmlSerializer$1 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const createOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: AppendBlobCreateHeaders + headersMapper: AppendBlobCreateHeaders, }, default: { bodyMapper: StorageError, - headersMapper: AppendBlobCreateExceptionHeaders - } + headersMapper: AppendBlobCreateExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], @@ -89340,22 +83296,22 @@ const createOperationSpec = { encryptionScope, blobTagsString, legalHold1, - blobType1 + blobType1, ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$1, }; const appendBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: AppendBlobAppendBlockHeaders + headersMapper: AppendBlobAppendBlockHeaders, }, default: { bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockExceptionHeaders - } + headersMapper: AppendBlobAppendBlockExceptionHeaders, + }, }, requestBody: body1, queryParameters: [timeoutInSeconds, comp22], @@ -89379,22 +83335,24 @@ const appendBlockOperationSpec = { contentType1, accept2, maxSize, - appendPosition + appendPosition, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer: serializer$1 + serializer: xmlSerializer$1, }; const appendBlockFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: AppendBlobAppendBlockFromUrlHeaders + headersMapper: AppendBlobAppendBlockFromUrlHeaders, }, default: { bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders - } + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], @@ -89424,22 +83382,22 @@ const appendBlockFromUrlOperationSpec = { sourceContentCrc64, maxSize, appendPosition, - sourceRange1 + sourceRange1, ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$1, }; const sealOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: AppendBlobSealHeaders + headersMapper: AppendBlobSealHeaders, }, default: { bodyMapper: StorageError, - headersMapper: AppendBlobSealExceptionHeaders - } + headersMapper: AppendBlobSealExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp23], urlParameters: [url], @@ -89452,10 +83410,10 @@ const sealOperationSpec = { ifUnmodifiedSince, ifMatch, ifNoneMatch, - appendPosition + appendPosition, ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$1, }; /* @@ -89465,8 +83423,8 @@ const sealOperationSpec = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a BlockBlob. */ -class BlockBlob { +/** Class containing BlockBlob operations. 
*/ +class BlockBlobImpl { /** * Initialize a new instance of the class BlockBlob class. * @param client Reference to the service client @@ -89484,12 +83442,7 @@ class BlockBlob { * @param options The options parameters. */ upload(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); } /** * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read @@ -89505,12 +83458,7 @@ class BlockBlob { * @param options The options parameters. */ putBlobFromUrl(contentLength, copySource, options) { - const operationArguments = { - contentLength, - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob @@ -89522,13 +83470,7 @@ class BlockBlob { * @param options The options parameters. */ stageBlock(blockId, contentLength, body, options) { - const operationArguments = { - blockId, - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob where the contents @@ -89541,13 +83483,7 @@ class BlockBlob { * @param options The options parameters. */ stageBlockFromURL(blockId, contentLength, sourceUrl, options) { - const operationArguments = { - blockId, - contentLength, - sourceUrl, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); } /** * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the @@ -89561,11 +83497,7 @@ class BlockBlob { * @param options The options parameters. */ commitBlockList(blocks, options) { - const operationArguments = { - blocks, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); } /** * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block @@ -89575,27 +83507,22 @@ class BlockBlob { * @param options The options parameters. 
*/ getBlockList(listType, options) { - const operationArguments = { - listType, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); } } // Operation Specifications -const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const xmlSerializer = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobUploadHeaders + headersMapper: BlockBlobUploadHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobUploadExceptionHeaders - } + headersMapper: BlockBlobUploadExceptionHeaders, + }, }, requestBody: body1, queryParameters: [timeoutInSeconds], @@ -89630,22 +83557,24 @@ const uploadOperationSpec = { transactionalContentCrc64, contentType1, accept2, - blobType2 + blobType2, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer + serializer: xmlSerializer, }; const putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobPutBlobFromUrlHeaders + headersMapper: BlockBlobPutBlobFromUrlHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders - } + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], @@ -89684,28 +83613,28 @@ const putBlobFromUrlOperationSpec = { copySourceTags, transactionalContentMD5, blobType2, - copySourceBlobProperties + copySourceBlobProperties, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const stageBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobStageBlockHeaders + headersMapper: BlockBlobStageBlockHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockExceptionHeaders - } + headersMapper: BlockBlobStageBlockExceptionHeaders, + }, }, requestBody: body1, queryParameters: [ timeoutInSeconds, comp24, - blockId + blockId, ], urlParameters: [url], headerParameters: [ @@ -89720,27 +83649,29 @@ const stageBlockOperationSpec = { transactionalContentMD5, transactionalContentCrc64, contentType1, - accept2 + accept2, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer + serializer: xmlSerializer, }; const stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobStageBlockFromURLHeaders + headersMapper: BlockBlobStageBlockFromURLHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockFromURLExceptionHeaders - } + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, comp24, - blockId + blockId, ], urlParameters: [url], headerParameters: [ @@ -89761,22 +83692,22 @@ const stageBlockFromURLOperationSpec = { copySourceAuthorization, sourceUrl, sourceContentCrc64, - sourceRange1 + sourceRange1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const commitBlockListOperationSpec = { path: "/{containerName}/{blob}", 
httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobCommitBlockListHeaders + headersMapper: BlockBlobCommitBlockListHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobCommitBlockListExceptionHeaders - } + headersMapper: BlockBlobCommitBlockListExceptionHeaders, + }, }, requestBody: blocks, queryParameters: [timeoutInSeconds, comp25], @@ -89809,12 +83740,12 @@ const commitBlockListOperationSpec = { blobTagsString, legalHold1, transactionalContentMD5, - transactionalContentCrc64 + transactionalContentCrc64, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; const getBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -89822,18 +83753,18 @@ const getBlockListOperationSpec = { responses: { 200: { bodyMapper: BlockList, - headersMapper: BlockBlobGetBlockListHeaders + headersMapper: BlockBlobGetBlockListHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobGetBlockListExceptionHeaders - } + headersMapper: BlockBlobGetBlockListExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, comp25, - listType + listType, ], urlParameters: [url], headerParameters: [ @@ -89841,3637 +83772,2866 @@ const getBlockListOperationSpec = { requestId, accept1, leaseId, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - */ -const logger = logger$1.createClientLogger("storage-blob"); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const SDK_VERSION = "12.17.0"; -const SERVICE_VERSION = "2023-11-03"; -const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB -const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB -const BLOCK_BLOB_MAX_BLOCKS = 50000; -const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB -const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB -const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; -const REQUEST_TIMEOUT = 100 * 1000; // In ms -/** - * The OAuth scope to use with Azure Storage. +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -const StorageOAuthScopes = "https://storage.azure.com/.default"; -const URLConstants = { - Parameters: { - FORCE_BROWSER_NO_CACHE: "_", - SIGNATURE: "sig", - SNAPSHOT: "snapshot", - VERSIONID: "versionid", - TIMEOUT: "timeout", - }, -}; -const HTTPURLConnection = { - HTTP_ACCEPTED: 202, - HTTP_CONFLICT: 409, - HTTP_NOT_FOUND: 404, - HTTP_PRECON_FAILED: 412, - HTTP_RANGE_NOT_SATISFIABLE: 416, -}; -const HeaderConstants = { - AUTHORIZATION: "Authorization", - AUTHORIZATION_SCHEME: "Bearer", - CONTENT_ENCODING: "Content-Encoding", - CONTENT_ID: "Content-ID", - CONTENT_LANGUAGE: "Content-Language", - CONTENT_LENGTH: "Content-Length", - CONTENT_MD5: "Content-Md5", - CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", - CONTENT_TYPE: "Content-Type", - COOKIE: "Cookie", - DATE: "date", - IF_MATCH: "if-match", - IF_MODIFIED_SINCE: "if-modified-since", - IF_NONE_MATCH: "if-none-match", - IF_UNMODIFIED_SINCE: "if-unmodified-since", - PREFIX_FOR_STORAGE: "x-ms-", - RANGE: "Range", - USER_AGENT: "User-Agent", - X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", - X_MS_COPY_SOURCE: "x-ms-copy-source", - X_MS_DATE: "x-ms-date", - X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version", +let StorageClient$1 = class StorageClient extends coreHttpCompat__namespace.ExtendedServiceClient { + /** + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url, options) { + var _a, _b; + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + const defaults = { + requestContentType: "application/json; charset=utf-8", + }; + const packageDetails = `azsdk-js-azure-storage-blob/12.25.0`; + const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix + ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { + userAgentPrefix, + }, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? 
_b : "{url}" }); + super(optionsWithDefaults); + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2024-11-04"; + this.service = new ServiceImpl(this); + this.container = new ContainerImpl(this); + this.blob = new BlobImpl(this); + this.pageBlob = new PageBlobImpl(this); + this.appendBlob = new AppendBlobImpl(this); + this.blockBlob = new BlockBlobImpl(this); + } }; -const ETagNone = ""; -const ETagAny = "*"; -const SIZE_1_MB = 1 * 1024 * 1024; -const BATCH_MAX_REQUEST = 256; -const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; -const HTTP_LINE_ENDING = "\r\n"; -const HTTP_VERSION_1_1 = "HTTP/1.1"; -const EncryptionAlgorithmAES25 = "AES256"; -const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; -const StorageBlobLoggingAllowedHeaderNames = [ - "Access-Control-Allow-Origin", - "Cache-Control", - "Content-Length", - "Content-Type", - "Date", - "Request-Id", - "traceparent", - "Transfer-Encoding", - "User-Agent", - "x-ms-client-request-id", - "x-ms-date", - "x-ms-error-code", - "x-ms-request-id", - "x-ms-return-client-request-id", - "x-ms-version", - "Accept-Ranges", - "Content-Disposition", - "Content-Encoding", - "Content-Language", - "Content-MD5", - "Content-Range", - "ETag", - "Last-Modified", - "Server", - "Vary", - "x-ms-content-crc64", - "x-ms-copy-action", - "x-ms-copy-completion-time", - "x-ms-copy-id", - "x-ms-copy-progress", - "x-ms-copy-status", - "x-ms-has-immutability-policy", - "x-ms-has-legal-hold", - "x-ms-lease-state", - "x-ms-lease-status", - "x-ms-range", - "x-ms-request-server-encrypted", - "x-ms-server-encrypted", - "x-ms-snapshot", - "x-ms-source-range", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "x-ms-access-tier", - "x-ms-access-tier-change-time", - "x-ms-access-tier-inferred", - "x-ms-account-kind", - "x-ms-archive-status", - "x-ms-blob-append-offset", - "x-ms-blob-cache-control", - "x-ms-blob-committed-block-count", - "x-ms-blob-condition-appendpos", - "x-ms-blob-condition-maxsize", - "x-ms-blob-content-disposition", - "x-ms-blob-content-encoding", - "x-ms-blob-content-language", - "x-ms-blob-content-length", - "x-ms-blob-content-md5", - "x-ms-blob-content-type", - "x-ms-blob-public-access", - "x-ms-blob-sequence-number", - "x-ms-blob-type", - "x-ms-copy-destination-snapshot", - "x-ms-creation-time", - "x-ms-default-encryption-scope", - "x-ms-delete-snapshots", - "x-ms-delete-type-permanent", - "x-ms-deny-encryption-scope-override", - "x-ms-encryption-algorithm", - "x-ms-if-sequence-number-eq", - "x-ms-if-sequence-number-le", - "x-ms-if-sequence-number-lt", - "x-ms-incremental-copy", - "x-ms-lease-action", - "x-ms-lease-break-period", - "x-ms-lease-duration", - "x-ms-lease-id", - "x-ms-lease-time", - "x-ms-page-write", - "x-ms-proposed-lease-id", - "x-ms-range-get-content-md5", - "x-ms-rehydrate-priority", - "x-ms-sequence-number-action", - "x-ms-sku-name", - "x-ms-source-content-md5", - "x-ms-source-if-match", - "x-ms-source-if-modified-since", - "x-ms-source-if-none-match", - "x-ms-source-if-unmodified-since", - "x-ms-tag-count", - "x-ms-encryption-key-sha256", - "x-ms-if-tags", - "x-ms-source-if-tags", -]; -const StorageBlobLoggingAllowedQueryParameters = [ - "comp", - "maxresults", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "se", - "si", - "sip", 
- "sp", - "spr", - "sr", - "srt", - "ss", - "st", - "sv", - "include", - "marker", - "prefix", - "copyid", - "restype", - "blockid", - "blocklisttype", - "delimiter", - "prevsnapshot", - "ske", - "skoid", - "sks", - "skt", - "sktid", - "skv", - "snapshot", -]; -const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; -const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; -/// List of ports used for path style addressing. -/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. -const PathStylePorts = [ - "10000", - "10001", - "10002", - "10003", - "10004", - "10100", - "10101", - "10102", - "10103", - "10104", - "11000", - "11001", - "11002", - "11003", - "11004", - "11100", - "11101", - "11102", - "11103", - "11104", -]; // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Reserved URL characters must be properly escaped for Storage services like Blob or File. - * - * ## URL encode and escape strategy for JS SDKs - * - * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. - * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL - * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. - * - * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. - * - * This is what legacy V2 SDK does, simple and works for most of the cases. - * - When customer URL string is "http://account.blob.core.windows.net/con/b:", - * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. - * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", - * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. - * - * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is - * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. - * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. - * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. - * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: - * - * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. - * - * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. - * - When customer URL string is "http://account.blob.core.windows.net/con/b:", - * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. - * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", - * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. 
- * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", - * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. - * - * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string - * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. - * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. - * And following URL strings are invalid: - * - "http://account.blob.core.windows.net/con/b%" - * - "http://account.blob.core.windows.net/con/b%2" - * - "http://account.blob.core.windows.net/con/b%G" - * - * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. - * - * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` - * - * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata - * - * @param url - + * @internal */ -function escapeURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); - path = path || "/"; - path = escape(path); - urlParsed.setPath(path); - return urlParsed.toString(); -} -function getProxyUriFromDevConnString(connectionString) { - // Development Connection String - // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key - let proxyUri = ""; - if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { - // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri - const matchCredentials = connectionString.split(";"); - for (const element of matchCredentials) { - if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { - proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; - } +class StorageContextClient extends StorageClient$1 { + async sendOperationRequest(operationArguments, operationSpec) { + const operationSpecToSend = Object.assign({}, operationSpec); + if (operationSpecToSend.path === "/{containerName}" || + operationSpecToSend.path === "/{containerName}/{blob}") { + operationSpecToSend.path = ""; } + return super.sendOperationRequest(operationArguments, operationSpecToSend); } - return proxyUri; } -function getValueInConnString(connectionString, argument) { - const elements = connectionString.split(";"); - for (const element of elements) { - if (element.trim().startsWith(argument)) { - return element.trim().match(argument + "=(.*)")[1]; - } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. 
+ */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = getCredentialFromPipeline(pipeline); + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; } - return ""; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Extracts the parts of an Azure Storage account connection string. + * Creates a span using the global tracer. + * @internal + */ +const tracingClient = coreTracing.createTracingClient({ + packageName: "@azure/storage-blob", + packageVersion: SDK_VERSION, + namespace: "Microsoft.Storage", +}); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param connectionString - Connection string. - * @returns String key value pairs of the storage account's url and credentials. + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. */ -function extractConnectionStringParts(connectionString) { - let proxyUri = ""; - if (connectionString.startsWith("UseDevelopmentStorage=true")) { - // Development connection string - proxyUri = getProxyUriFromDevConnString(connectionString); - connectionString = DevelopmentConnectionString; +class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; } - // Matching BlobEndpoint in the Account connection string - let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); - // Slicing off '/' at the end if exists - // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) - blobEndpoint = blobEndpoint.endsWith("/") ? 
blobEndpoint.slice(0, -1) : blobEndpoint; - if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && - connectionString.search("AccountKey=") !== -1) { - // Account connection string - let defaultEndpointsProtocol = ""; - let accountName = ""; - let accountKey = Buffer.from("accountKey", "base64"); - let endpointSuffix = ""; - // Get account name and key - accountName = getValueInConnString(connectionString, "AccountName"); - accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); - if (!blobEndpoint) { - // BlobEndpoint is not present in the Account connection string - // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` - defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); - const protocol = defaultEndpointsProtocol.toLowerCase(); - if (protocol !== "https" && protocol !== "http") { - throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); - } - endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); - if (!endpointSuffix) { - throw new Error("Invalid EndpointSuffix in the provided Connection String"); + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); } - blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; } - if (!accountName) { - throw new Error("Invalid AccountName in the provided Connection String"); + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; } - else if (accountKey.length === 0) { - throw new Error("Invalid AccountKey in the provided Connection String"); + if (permissionLike.add) { + blobSASPermissions.add = true; } - return { - kind: "AccountConnString", - url: blobEndpoint, - accountName, - accountKey, - proxyUri, - }; + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; } - else { - // SAS connection string - const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); - let accountName = getValueInConnString(connectionString, "AccountName"); - // if accountName is empty, try to read it from BlobEndpoint - if (!accountName) { - accountName = getAccountNameFromUrl(blobEndpoint); + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); } - if (!blobEndpoint) { - throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + if (this.add) { + permissions.push("a"); } - else if (!accountSas) { - throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + if (this.create) { + permissions.push("c"); } - return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); } } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Internal escape method implemented Strategy Two mentioned in escapeURL() description. - * - * @param text - - */ -function escape(text) { - return encodeURIComponent(text) - .replace(/%2F/g, "/") // Don't escape for "/" - .replace(/'/g, "%27") // Escape for "'" - .replace(/\+/g, "%20") - .replace(/%25/g, "%"); // Revert encoded "%" -} -/** - * Append a string to URL path. Will remove duplicated "/" in front of the string - * when URL path ends with a "/". - * - * @param url - Source URL string - * @param name - String to be appended to URL - * @returns An updated URL string - */ -function appendToURLPath(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); - path = path ? (path.endsWith("/") ? 
`${path}${name}` : `${path}/${name}`) : name; - urlParsed.setPath(path); - const normalizedUrl = new URL(urlParsed.toString()); - return normalizedUrl.toString(); -} -/** - * Set URL parameter name and value. If name exists in URL parameters, old value - * will be replaced by name key. If not provide value, the parameter will be deleted. - * - * @param url - Source URL string - * @param name - Parameter name - * @param value - Parameter value - * @returns An updated URL string - */ -function setURLParameter(url, name, value) { - const urlParsed = coreHttp.URLBuilder.parse(url); - urlParsed.setQueryParameter(name, value); - return urlParsed.toString(); -} -/** - * Get URL parameter by name. - * - * @param url - - * @param name - - */ -function getURLParameter(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); - return urlParsed.getQueryParameterValue(name); -} -/** - * Set URL host. - * - * @param url - Source URL string - * @param host - New host string - * @returns An updated URL string - */ -function setURLHost(url, host) { - const urlParsed = coreHttp.URLBuilder.parse(url); - urlParsed.setHost(host); - return urlParsed.toString(); -} -/** - * Get URL path from an URL string. - * - * @param url - Source URL string - */ -function getURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - return urlParsed.getPath(); -} -/** - * Get URL scheme from an URL string. - * - * @param url - Source URL string - */ -function getURLScheme(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - return urlParsed.getScheme(); -} -/** - * Get URL path and query from an URL string. - * - * @param url - Source URL string + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. */ -function getURLPathAndQuery(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - const pathString = urlParsed.getPath(); - if (!pathString) { - throw new RangeError("Invalid url without valid path."); +class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. 
+ */ + this.filterByTags = false; } - let queryString = urlParsed.getQuery() || ""; - queryString = queryString.trim(); - if (queryString !== "") { - queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; } - return `${pathString}${queryString}`; -} -/** - * Get URL query key value pairs from an URL string. - * - * @param url - - */ -function getURLQueries(url) { - let queryString = coreHttp.URLBuilder.parse(url).getQuery(); - if (!queryString) { - return {}; + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; } - queryString = queryString.trim(); - queryString = queryString.startsWith("?") ? 
queryString.substr(1) : queryString; - let querySubStrings = queryString.split("&"); - querySubStrings = querySubStrings.filter((value) => { - const indexOfEqual = value.indexOf("="); - const lastIndexOfEqual = value.lastIndexOf("="); - return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); - }); - const queries = {}; - for (const querySubString of querySubStrings) { - const splitResults = querySubString.split("="); - const key = splitResults[0]; - const value = splitResults[1]; - queries[key] = value; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); } - return queries; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Append a string to URL query. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param url - Source URL string. - * @param queryParts - String to be appended to the URL query. - * @returns An updated URL string. + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas */ -function appendToURLQuery(url, queryParts) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let query = urlParsed.getQuery(); - if (query) { - query += "&" + queryParts; +class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); } - else { - query = queryParts; + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } - urlParsed.setQuery(query); - return urlParsed.toString(); } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Rounds a date off to seconds. + * Generate SasIPRange format string. For example: * - * @param date - - * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; - * If false, YYYY-MM-DDThh:mm:ssZ will be returned. 
- * @returns Date string in ISO8061 format, with or without 7 milliseconds component - */ -function truncatedISO8061Date(date, withMilliseconds = true) { - // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" - const dateString = date.toISOString(); - return withMilliseconds - ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" - : dateString.substring(0, dateString.length - 5) + "Z"; -} -/** - * Base64 encode. + * "8.8.8.8" or "1.1.1.1-255.255.255.255" * - * @param content - + * @param ipRange - */ -function base64encode(content) { - return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64"); +function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Generate a 64 bytes base64 block ID string. - * - * @param blockIndex - + * Protocols for generated SAS. */ -function generateBlockID(blockIDPrefix, blockIndex) { - // To generate a 64 bytes base64 string, source string should be 48 - const maxSourceStringLength = 48; - // A blob can have a maximum of 100,000 uncommitted blocks at any given time - const maxBlockIndexLength = 6; - const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; - if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { - blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); - } - const res = blockIDPrefix + - padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); - return base64encode(res); -} +exports.SASProtocol = void 0; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(exports.SASProtocol || (exports.SASProtocol = {})); /** - * Delay specified time interval. + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). * - * @param timeInMs - - * @param aborter - - * @param abortError - + * NOTE: Instances of this class are immutable. */ -async function delay(timeInMs, aborter, abortError) { - return new Promise((resolve, reject) => { - /* eslint-disable-next-line prefer-const */ - let timeout; - const abortHandler = () => { - if (timeout !== undefined) { - clearTimeout(timeout); - } - reject(abortError); - }; - const resolveHandler = () => { - if (aborter !== undefined) { - aborter.removeEventListener("abort", abortHandler); +class SASQueryParameters { + /** + * Optional. IP range allowed for this SAS. 
+ * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; } - resolve(); - }; - timeout = setTimeout(resolveHandler, timeInMs); - if (aborter !== undefined) { - aborter.addEventListener("abort", abortHandler); } - }); -} -/** - * String.prototype.padStart() - * - * @param currentString - - * @param targetLength - - * @param padString - - */ -function padStart(currentString, targetLength, padString = " ") { - // @ts-expect-error: TS doesn't 
know this code needs to run downlevel sometimes - if (String.prototype.padStart) { - return currentString.padStart(targetLength, padString); } - padString = padString || " "; - if (currentString.length > targetLength) { - return currentString; + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. + * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", // Signed object ID + "sktid", // Signed tenant ID + "skt", // Signed key start time + "ske", // Signed key expiry time + "sks", // Signed key service + "skv", // Signed key version + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); } - else { - targetLength = targetLength - currentString.length; - if (targetLength > padString.length) { - padString += padString.repeat(targetLength / padString.length); + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); } - return padString.slice(0, targetLength) + currentString; } } -/** - * If two strings are equal when compared case insensitive. - * - * @param str1 - - * @param str2 - - */ -function iEqual(str1, str2) { - return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; } -/** - * Extracts account name from the url - * @param url - url to extract the account name from - * @returns with the account name - */ -function getAccountNameFromUrl(url) { - const parsedUrl = coreHttp.URLBuilder.parse(url); - let accountName; - try { - if (parsedUrl.getHost().split(".")[1] === "blob") { - // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; - accountName = parsedUrl.getHost().split(".")[0]; - } - else if (isIpEndpointStyle(parsedUrl)) { - // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ - // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ - // .getPath() -> /devstoreaccount1/ - accountName = parsedUrl.getPath().split("/")[1]; +function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? 
sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); } else { - // Custom domain case: "https://customdomain.com/containername/blob". - accountName = ""; + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); } - return accountName; } - catch (error) { - throw new Error("Unable to extract accountName with provided information."); + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } } -} -function isIpEndpointStyle(parsedUrl) { - if (parsedUrl.getHost() === undefined) { - return false; + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } } - const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); - // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. - // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. - // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. - // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. - return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || - (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()))); + throw new RangeError("'version' must be >= '2015-04-05'."); } /** - * Convert Tags to encoded string. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. * - * @param tags - + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. 
+ * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - */ -function toBlobTagsString(tags) { - if (tags === undefined) { - return undefined; +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - const tagPairs = []; - for (const key in tags) { - if (Object.prototype.hasOwnProperty.call(tags, key)) { - const value = tags[key]; - tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } - return tagPairs.join("&"); + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign: stringToSign, + }; } /** - * Convert Tags type to BlobTags. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. 
* - * @param tags - + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - */ -function toBlobTags(tags) { - if (tags === undefined) { - return undefined; +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - const res = { - blobTagSet: [], - }; - for (const key in tags) { - if (Object.prototype.hasOwnProperty.call(tags, key)) { - const value = tags[key]; - res.blobTagSet.push({ - key, - value, - }); + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } } - return res; + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign: stringToSign, + }; } /** - * Covert BlobTags to Tags type. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. * - * @param tags - + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - */ -function toTags(tags) { - if (tags === undefined) { - return undefined; +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - const res = {}; - for (const blobTag of tags.blobTagSet) { - res[blobTag.key] = blobTag.value; + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } } - return res; + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope), + stringToSign: stringToSign, + }; } /** - * Convert BlobQueryTextConfiguration to QuerySerialization type. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. * - * @param textConfiguration - + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - */ -function toQuerySerialization(textConfiguration) { - if (textConfiguration === undefined) { - return undefined; +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. 
+ if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } - switch (textConfiguration.kind) { - case "csv": - return { - format: { - type: "delimited", - delimitedTextConfiguration: { - columnSeparator: textConfiguration.columnSeparator || ",", - fieldQuote: textConfiguration.fieldQuote || "", - recordSeparator: textConfiguration.recordSeparator, - escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false, - }, - }, - }; - case "json": - return { - format: { - type: "json", - jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator, - }, - }, - }; - case "arrow": - return { - format: { - type: "arrow", - arrowConfiguration: { - schema: textConfiguration.schema, - }, - }, - }; - case "parquet": - return { - format: { - type: "parquet", - }, - }; - default: - throw Error("Invalid BlobQueryTextConfiguration."); + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } } -} -function parseObjectReplicationRecord(objectReplicationRecord) { - if (!objectReplicationRecord) { - return undefined; + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } } - if ("policy-id" in objectReplicationRecord) { - // If the dictionary contains a key with policy id, we are not required to do any parsing since - // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. - return undefined; + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), + stringToSign: stringToSign, + }; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } - const orProperties = []; - for (const key in objectReplicationRecord) { - const ids = key.split("_"); - const policyPrefix = "or-"; - if (ids[0].startsWith(policyPrefix)) { - ids[0] = ids[0].substring(policyPrefix.length); + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; } - const rule = { - ruleId: ids[1], - replicationStatus: objectReplicationRecord[key], - }; - const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); - if (policyIndex > -1) { - orProperties[policyIndex].rules.push(rule); + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { - orProperties.push({ - policyId: ids[0], - rules: [rule], - }); + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } - return orProperties; + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? 
truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), + stringToSign: stringToSign, + }; } /** - * Attach a TokenCredential to an object. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. * - * @param thing - - * @param credential - + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - */ -function attachCredential(thing, credential) { - thing.credential = credential; - return thing; -} -function httpAuthorizationToString(httpAuthorization) { - return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; -} -function BlobNameToString(name) { - if (name.encoded) { - return decodeURIComponent(name.content); +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. 
+ if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } - else { - return name.content; + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } } -} -function ConvertInternalResponseOfListBlobFlat(internalResponse) { - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }), - } }); -} -function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { - var _a; - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }), - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }), - } }); -} -function* ExtractPageRangeInfoItems(getPageRangesSegment) { - let pageRange = []; - let clearRange = []; - if (getPageRangesSegment.pageRange) - pageRange = getPageRangesSegment.pageRange; - if (getPageRangesSegment.clearRange) - clearRange = getPageRangesSegment.clearRange; - let pageRangeIndex = 0; - let clearRangeIndex = 0; - while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { - if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false, - }; - ++pageRangeIndex; + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true, - }; - ++clearRangeIndex; + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } - for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false, - }; - } - for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true, - }; + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), + stringToSign: stringToSign, + }; +} +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); } + return elements.join(""); } -/** - * Escape the blobName but keep path separator ('/'). - */ -function EscapePath(blobName) { - const split = blobName.split("/"); - for (let i = 0; i < split.length; i++) { - split[i] = encodeURIComponent(split[i]); +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); } - return split.join("/"); + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: - * - * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. - * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL - * thus avoid the browser cache. - * - * 2. Remove cookie header for security - * - * 3. Remove content-length header to avoid browsers warning + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. */ -class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { +class BlobLeaseClient { /** - * Creates an instance of StorageBrowserPolicy. - * @param nextPolicy - - * @param options - + * Gets the lease Id. + * + * @readonly */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
- /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + get leaseId() { + return this._leaseId; } /** - * Sends out request. + * Gets the url. * - * @param request - + * @readonly */ - async sendRequest(request) { - if (coreHttp.isNode) { - return this._nextPolicy.sendRequest(request); + get url() { + return this._url; + } + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId) { + const clientContext = client.storageClientContext; + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = clientContext.container; } - if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { - request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + else { + this._isContainer = false; + this._containerOrBlobOperation = clientContext.blob; } - request.headers.remove(HeaderConstants.COOKIE); - // According to XHR standards, content-length should be fully controlled by browsers - request.headers.remove(HeaderConstants.CONTENT_LENGTH); - return this._nextPolicy.sendRequest(request); + if (!leaseId) { + leaseId = coreUtil.randomUUID(); + } + this._leaseId = leaseId; } -} - -// Copyright (c) Microsoft Corporation. -/** - * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. - */ -class StorageBrowserPolicyFactory { /** - * Creates a StorageBrowserPolicyFactory object. + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * - * @param nextPolicy - - * @param options - + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. */ - create(nextPolicy, options) { - return new StorageBrowserPolicy(nextPolicy, options); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * RetryPolicy types. - */ -exports.StorageRetryPolicyType = void 0; -(function (StorageRetryPolicyType) { - /** - * Exponential retry. Retry time delay grows exponentially. - */ - StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; - /** - * Linear retry. Retry time delay grows linearly. - */ - StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; -})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); -// Default values of StorageRetryOptions -const DEFAULT_RETRY_OPTIONS = { - maxRetryDelayInMs: 120 * 1000, - maxTries: 4, - retryDelayInMs: 4 * 1000, - retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: undefined, // Use server side default timeout strategy -}; -const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); -/** - * Retry policy with exponential retry and linear retry implemented. - */ -class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { - /** - * Creates an instance of RetryPolicy. 
- * - * @param nextPolicy - - * @param options - - * @param retryOptions - - */ - constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { - super(nextPolicy, options); - // Initialize retry options - this.retryOptions = { - retryPolicyType: retryOptions.retryPolicyType - ? retryOptions.retryPolicyType - : DEFAULT_RETRY_OPTIONS.retryPolicyType, - maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 - ? Math.floor(retryOptions.maxTries) - : DEFAULT_RETRY_OPTIONS.maxTries, - tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 - ? retryOptions.tryTimeoutInMs - : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, - retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 - ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs - ? retryOptions.maxRetryDelayInMs - : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) - : DEFAULT_RETRY_OPTIONS.retryDelayInMs, - maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 - ? retryOptions.maxRetryDelayInMs - : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, - secondaryHost: retryOptions.secondaryHost - ? retryOptions.secondaryHost - : DEFAULT_RETRY_OPTIONS.secondaryHost, - }; + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.acquireLease({ + abortSignal: options.abortSignal, + duration, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + proposedLeaseId: this._leaseId, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Sends request. + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * - * @param request - + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. */ - async sendRequest(request) { - return this.attemptSendRequest(request, false, 1); + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + this._leaseId = proposedLeaseId; + return response; + }); } /** - * Decide and perform next retry. Won't mutate request parameter. + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * - * @param request - - * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then - * the resource was not found. This may be due to replication delay. So, in this - * case, we'll never try the secondary again for this operation. - * @param attempt - How many retries has been attempted to performed, starting from 1, which includes - * the attempt will be performed by this method call. + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. */ - async attemptSendRequest(request, secondaryHas404, attempt) { - const newRequest = request.clone(); - const isPrimaryRetry = secondaryHas404 || - !this.retryOptions.secondaryHost || - !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || - attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); - } - // Set the server-side timeout query parameter "timeout=[seconds]" - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); - } - let response; - try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; - } - secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); - } - catch (err) { - logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; - } + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - await this.delay(isPrimaryRetry, attempt, request.abortSignal); - return this.attemptSendRequest(request, secondaryHas404, ++attempt); + return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Decide whether to retry according to last HTTP response and retry counters. + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * - * @param isPrimaryRetry - - * @param attempt - - * @param response - - * @param err - + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. */ - shouldRetry(isPrimaryRetry, attempt, response, err) { - if (attempt >= this.retryOptions.maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions - .maxTries}, no further try.`); - return false; - } - // Handle network failures, you may need to customize the list when you implement - // your own http client - const retriableErrors = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js - ]; - if (err) { - for (const retriableError of retriableErrors) { - if (err.name.toUpperCase().includes(retriableError) || - err.message.toUpperCase().includes(retriableError) || - (err.code && err.code.toString().toUpperCase() === retriableError)) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } - } - } - // If attempt was against the secondary & it returned a StatusNotFound (404), then - // the resource was not found. This may be due to replication delay. So, in this - // case, we'll never try the secondary again for this operation. - if (response || err) { - const statusCode = response ? response.status : err ? err.statusCode : 0; - if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; - } - // Server internal error or server timeout - if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; - } - } - if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { - logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; + async renewLease(options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? 
void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - return false; + return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { + var _a; + return this._containerOrBlobOperation.renewLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }); + }); } /** - * Delay a calculated time between retries. + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * - * @param isPrimaryRetry - - * @param attempt - - * @param abortSignal - + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. */ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case exports.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case exports.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; - } - } - else { - delayTimeInMs = Math.random() * 1000; + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { + var _a; + const operationOptions = { + abortSignal: options.abortSignal, + breakPeriod, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }; + return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); + }); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. 
+ * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. */ -class StorageRetryPolicyFactory { - /** - * Creates an instance of StorageRetryPolicyFactory. - * @param retryOptions - - */ - constructor(retryOptions) { - this.retryOptions = retryOptions; - } +class RetriableReadableStream extends stream.Readable { /** - * Creates a StorageRetryPolicy object. + * Creates an instance of RetriableReadableStream. * - * @param nextPolicy - + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read * @param options - */ - create(nextPolicy, options) { - return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.sourceErrorOrEndHandler(); + this.source.destroy(); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceAbortedHandler = () => { + const abortError = new abortController.AbortError("The operation was aborted."); + this.destroy(abortError); + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? 
options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + // needed for Node14 + this.source.on("aborted", this.sourceAbortedHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + this.source.removeListener("aborted", this.sourceAbortedHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? undefined : error); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Credential policy used to sign HTTP(S) requests before sending. This is an - * abstract class. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) + * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. */ -class CredentialPolicy extends coreHttp.BaseRequestPolicy { +class BlobDownloadResponse { /** - * Sends out request. + * Indicates that the service supports + * requests for partial file content. * - * @param request - + * @readonly */ - sendRequest(request) { - return this._nextPolicy.sendRequest(this.signRequest(request)); + get acceptRanges() { + return this.originalResponse.acceptRanges; } /** - * Child classes must implement this method with request signing. This method - * will be executed in {@link sendRequest}. + * Returns if it was previously specified + * for the file. * - * @param request - + * @readonly */ - signRequest(request) { - // Child classes must override this method with request signing. This method - // will be executed in sendRequest(). - return request; + get cacheControl() { + return this.originalResponse.cacheControl; } -} - -// Copyright (c) Microsoft Corporation. -/** - * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources - * or for use with Shared Access Signatures (SAS). - */ -class AnonymousCredentialPolicy extends CredentialPolicy { /** - * Creates an instance of AnonymousCredentialPolicy. - * @param nextPolicy - - * @param options - + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + get contentDisposition() { + return this.originalResponse.contentDisposition; } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Credential is an abstract class for Azure Storage HTTP requests signing. This - * class will host an credentialPolicyCreator factory which generates CredentialPolicy. 
- */ -class Credential { /** - * Creates a RequestPolicy object. + * Returns the value that was specified + * for the Content-Encoding request header. * - * @param _nextPolicy - - * @param _options - + * @readonly */ - create(_nextPolicy, _options) { - throw new Error("Method should be implemented in children classes."); + get contentEncoding() { + return this.originalResponse.contentEncoding; } -} - -// Copyright (c) Microsoft Corporation. -/** - * AnonymousCredential provides a credentialPolicyCreator member used to create - * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with - * HTTP(S) requests that read public resources or for use with Shared Access - * Signatures (SAS). - */ -class AnonymousCredential extends Credential { /** - * Creates an {@link AnonymousCredentialPolicy} object. + * Returns the value that was specified + * for the Content-Language request header. * - * @param nextPolicy - - * @param options - + * @readonly */ - create(nextPolicy, options) { - return new AnonymousCredentialPolicy(nextPolicy, options); + get contentLanguage() { + return this.originalResponse.contentLanguage; } -} - -// Copyright (c) Microsoft Corporation. -/** - * TelemetryPolicy is a policy used to tag user-agent header for every requests. - */ -class TelemetryPolicy extends coreHttp.BaseRequestPolicy { /** - * Creates an instance of TelemetryPolicy. - * @param nextPolicy - - * @param options - - * @param telemetry - + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly */ - constructor(nextPolicy, options, telemetry) { - super(nextPolicy, options); - this.telemetry = telemetry; + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; } /** - * Sends out request. + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. * - * @param request - + * @readonly */ - async sendRequest(request) { - if (coreHttp.isNode) { - if (!request.headers) { - request.headers = new coreHttp.HttpHeaders(); - } - if (!request.headers.get(HeaderConstants.USER_AGENT)) { - request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); - } - } - return this._nextPolicy.sendRequest(request); + get blobType() { + return this.originalResponse.blobType; } -} - -// Copyright (c) Microsoft Corporation. -/** - * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. - */ -class TelemetryPolicyFactory { /** - * Creates an instance of TelemetryPolicyFactory. - * @param telemetry - + * The number of bytes present in the + * response body. + * + * @readonly */ - constructor(telemetry) { - const userAgentInfo = []; - if (coreHttp.isNode) { - if (telemetry) { - const telemetryString = telemetry.userAgentPrefix || ""; - if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { - userAgentInfo.push(telemetryString); - } - } - // e.g. azsdk-js-storageblob/10.0.0 - const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; - if (userAgentInfo.indexOf(libInfo) === -1) { - userAgentInfo.push(libInfo); - } - // e.g. 
(NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - let runtimeInfo = `(NODE-VERSION ${process.version})`; - if (os__namespace) { - runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; - } - if (userAgentInfo.indexOf(runtimeInfo) === -1) { - userAgentInfo.push(runtimeInfo); - } - } - this.telemetryString = userAgentInfo.join(" "); + get contentLength() { + return this.originalResponse.contentLength; } /** - * Creates a TelemetryPolicy object. + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. * - * @param nextPolicy - - * @param options - + * @readonly */ - create(nextPolicy, options) { - return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + get contentMD5() { + return this.originalResponse.contentMD5; } -} - -// Copyright (c) Microsoft Corporation. -const _defaultHttpClient = new coreHttp.DefaultHttpClient(); -function getCachedDefaultHttpClient() { - return _defaultHttpClient; -} - -// Copyright (c) Microsoft Corporation. -/** - * A set of constants used internally when processing requests. - */ -const Constants = { - DefaultScope: "/.default", /** - * Defines constants for use with HTTP headers. + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly */ - HeaderConstants: { - /** - * The Authorization header. - */ - AUTHORIZATION: "authorization", - }, -}; -// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry -}; -/** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token - */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } + get contentRange() { + return this.originalResponse.contentRange; } - let token = await tryGetAccessToken(); - while (token === null) { - await coreHttp.delay(retryIntervalInMs); - token = await tryGetAccessToken(); + /** + * The content type specified for the file. 
+ * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). - */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - }, - }; + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } /** - * Starts a refresh job or returns the existing job if one is already - * running. + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. 
- refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; + get copyId() { + return this.originalResponse.copyId; } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -/** - * We will retrieve the challenge only if the response status code was 401, - * and if the response contained the header "WWW-Authenticate" with a non-empty value. - */ -function getChallenge(response) { - const challenge = response.headers.get("WWW-Authenticate"); - if (response.status === 401 && challenge) { - return challenge; + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; } - return; -} -/** - * Converts: `Bearer a="b" c="d"`. - * Into: `[ { a: 'b', c: 'd' }]`. - * - * @internal - */ -function parseChallenge(challenge) { - const bearerChallenge = challenge.slice("Bearer ".length); - const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); - const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); - // Key-value pairs to plain object: - return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. - */ -function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - let getToken = createTokenCycler(credential, scopes); - class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const getTokenInternal = getToken; - const token = (await getTokenInternal({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - })).token; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - const response = await this._nextPolicy.sendRequest(webResource); - if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { - const challenge = getChallenge(response); - if (challenge) { - const challengeInfo = parseChallenge(challenge); - const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; - const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); - const pathSegments = parsedAuthUri.getPath().split("/"); - const tenantId = pathSegments[1]; - const getTokenForChallenge = createTokenCycler(credential, challengeScopes); - const tokenForChallenge = (await getTokenForChallenge({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - tenantId: tenantId, - })).token; - getToken = getTokenForChallenge; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); - return this._nextPolicy.sendRequest(webResource); - } - } - return response; - } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; } - return { - create: (nextPolicy, options) => { - return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); - }, - }; -} - -// Copyright (c) Microsoft Corporation. -/** - * A helper to decide if a given argument satisfies the Pipeline contract - * @param pipeline - An argument that may be a Pipeline - * @returns true when the argument satisfies the Pipeline contract - */ -function isPipelineLike(pipeline) { - if (!pipeline || typeof pipeline !== "object") { - return false; + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; } - const castPipeline = pipeline; - return (Array.isArray(castPipeline.factories) && - typeof castPipeline.options === "object" && - typeof castPipeline.toServiceClientOptions === "function"); -} -/** - * A Pipeline class containing HTTP request policies. - * You can create a default Pipeline by calling {@link newPipeline}. - * Or you can create a Pipeline with your own policies by the constructor of Pipeline. - * - * Refer to {@link newPipeline} and provided policies before implementing your - * customized Pipeline. - */ -class Pipeline { /** - * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. * - * @param factories - - * @param options - + * @readonly */ - constructor(factories, options = {}) { - this.factories = factories; - // when options.httpClient is not specified, passing in a DefaultHttpClient instance to - // avoid each client creating its own http client. - this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; } /** - * Transfer Pipeline object to ServiceClientOptions object which is required by - * ServiceClient constructor. + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. * - * @returns The ServiceClientOptions object from this Pipeline. 
+ * @readonly */ - toServiceClientOptions() { - return { - httpClient: this.options.httpClient, - requestPolicyFactories: this.factories, - }; + get leaseDuration() { + return this.originalResponse.leaseDuration; } -} -/** - * Creates a new Pipeline object with Credential provided. - * - * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. - * @param pipelineOptions - Optional. Options. - * @returns A new Pipeline object. - */ -function newPipeline(credential, pipelineOptions = {}) { - var _a; - if (credential === undefined) { - credential = new AnonymousCredential(); + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; } - // Order is important. Closer to the API at the top & closer to the network at the bottom. - // The credential's policy factory must appear close to the wire so it can sign any - // changes made by other factories (like UniqueRequestIDPolicyFactory) - const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); - const factories = [ - coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), - coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), - telemetryPolicy, - coreHttp.generateClientRequestIdPolicy(), - new StorageBrowserPolicyFactory(), - new StorageRetryPolicyFactory(pipelineOptions.retryOptions), - // Default deserializationPolicy is provided by protocol layer - // Use customized XML char key of "#" so we could deserialize metadata - // with "_" key - coreHttp.deserializationPolicy(undefined, { xmlCharKey: "#" }), - coreHttp.logPolicy({ - logger: logger.info, - allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, - }), - ]; - if (coreHttp.isNode) { - // policies only available in Node.js runtime, not in browsers - factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); - factories.push(coreHttp.disableResponseDecompressionPolicy()); + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; } - factories.push(coreHttp.isTokenCredential(credential) - ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) - : credential); - return new Pipeline(factories, pipelineOptions); -} - -// Copyright (c) Microsoft Corporation. -/** - * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. - */ -class StorageSharedKeyCredentialPolicy extends CredentialPolicy { /** - * Creates an instance of StorageSharedKeyCredentialPolicy. - * @param nextPolicy - - * @param options - - * @param factory - + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly */ - constructor(nextPolicy, options, factory) { - super(nextPolicy, options); - this.factory = factory; + get date() { + return this.originalResponse.date; } /** - * Signs request. + * The number of committed blocks + * present in the blob. 
This header is returned only for append blobs. * - * @param request - + * @readonly */ - signRequest(request) { - request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); - if (request.body && - (typeof request.body === "string" || request.body !== undefined) && - request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); - } - const stringToSign = [ - request.method.toUpperCase(), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), - this.getHeaderValueToSign(request, HeaderConstants.DATE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE), - ].join("\n") + - "\n" + - this.getCanonicalizedHeadersString(request) + - this.getCanonicalizedResourceString(request); - const signature = this.factory.computeHMACSHA256(stringToSign); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); - // console.log(`[URL]:${request.url}`); - // console.log(`[HEADERS]:${request.headers.toString()}`); - // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); - // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); - return request; + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; } /** - * Retrieve header value according to shared key sign rules. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. * - * @param request - - * @param headerName - + * @readonly */ - getHeaderValueToSign(request, headerName) { - const value = request.headers.get(headerName); - if (!value) { - return ""; - } - // When using version 2015-02-21 or later, if Content-Length is zero, then - // set the Content-Length part of the StringToSign to an empty string. - // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key - if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { - return ""; - } - return value; + get etag() { + return this.originalResponse.etag; } /** - * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: - * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. - * 2. Convert each HTTP header name to lowercase. - * 3. Sort the headers lexicographically by header name, in ascending order. - * Each header may appear only once in the string. - * 4. Replace any linear whitespace in the header value with a single space. - * 5. Trim any whitespace around the colon in the header. - * 6. Finally, append a new-line character to each canonicalized header in the resulting list. - * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * The number of tags associated with the blob * - * @param request - + * @readonly */ - getCanonicalizedHeadersString(request) { - let headersArray = request.headers.headersArray().filter((value) => { - return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); - }); - headersArray.sort((a, b) => { - return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); - }); - // Remove duplicate headers - headersArray = headersArray.filter((value, index, array) => { - if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { - return false; - } - return true; - }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name - .toLowerCase() - .trimRight()}:${header.value.trimLeft()}\n`; - }); - return canonicalizedHeadersStringToSign; + get tagCount() { + return this.originalResponse.tagCount; } /** - * Retrieves the webResource canonicalized resource string. + * The error code. * - * @param request - + * @readonly */ - getCanonicalizedResourceString(request) { - const path = getURLPath(request.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path}`; - const queries = getURLQueries(request.url); - const lowercaseQueries = {}; - if (queries) { - const queryKeys = []; - for (const key in queries) { - if (Object.prototype.hasOwnProperty.call(queries, key)) { - const lowercaseKey = key.toLowerCase(); - lowercaseQueries[lowercaseKey] = queries[key]; - queryKeys.push(lowercaseKey); - } - } - queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; - } - } - return canonicalizedResourceString; + get errorCode() { + return this.originalResponse.errorCode; } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * StorageSharedKeyCredential for account key authorization of Azure Storage service. - */ -class StorageSharedKeyCredential extends Credential { /** - * Creates an instance of StorageSharedKeyCredential. - * @param accountName - - * @param accountKey - + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly */ - constructor(accountName, accountKey) { - super(); - this.accountName = accountName; - this.accountKey = Buffer.from(accountKey, "base64"); + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; } /** - * Creates a StorageSharedKeyCredentialPolicy object. + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. * - * @param nextPolicy - - * @param options - + * @readonly */ - create(nextPolicy, options) { - return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + get blobContentMD5() { + return this.originalResponse.blobContentMD5; } /** - * Generates a hash signature for an HTTP request or for a SAS. + * Returns the date and time the file was last + * modified. 
Any operation that modifies the file or its properties updates + * the last modified time. * - * @param stringToSign - + * @readonly */ - computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + get lastModified() { + return this.originalResponse.lastModified; } -} - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -const packageName = "azure-storage-blob"; -const packageVersion = "12.17.0"; -class StorageClientContext extends coreHttp__namespace.ServiceClient { /** - * Initializes a new instance of the StorageClientContext class. - * @param url The URL of the service account, container, or blob that is the target of the desired - * operation. - * @param options The parameter options + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly */ - constructor(url, options) { - if (url === undefined) { - throw new Error("'url' cannot be null"); - } - // Initializing default values for options - if (!options) { - options = {}; - } - if (!options.userAgent) { - const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); - options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; - } - super(undefined, options); - this.requestContentType = "application/json; charset=utf-8"; - this.baseUri = options.endpoint || "{url}"; - // Parameter assignments - this.url = url; - // Assigning values to Constant parameters - this.version = options.version || "2023-11-03"; + get lastAccessed() { + return this.originalResponse.lastAccessed; } -} - -// Copyright (c) Microsoft Corporation. -/** - * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} - * and etc. - */ -class StorageClient { /** - * Creates an instance of StorageClient. - * @param url - url to resource - * @param pipeline - request policy pipeline. + * Returns the date and time the blob was created. + * + * @readonly */ - constructor(url, pipeline) { - // URL should be encoded and only once, protocol layer shouldn't encode URL again - this.url = escapeURLPath(url); - this.accountName = getAccountNameFromUrl(url); - this.pipeline = pipeline; - this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); - this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); - this.credential = new AnonymousCredential(); - for (const factory of this.pipeline.factories) { - if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) || - factory instanceof AnonymousCredential) { - this.credential = factory; - } - else if (coreHttp.isTokenCredential(factory.credential)) { - // Only works if the factory has been attached a "credential" property. - // We do that in newPipeline() when using TokenCredential. - this.credential = factory.credential; - } - } - // Override protocol layer's default content-type - const storageClientContext = this.storageClientContext; - storageClientContext.requestContentType = undefined; + get createdOn() { + return this.originalResponse.createdOn; } -} - -// Copyright (c) Microsoft Corporation. -/** - * Creates a span using the global tracer. 
- * @internal - */ -const createSpan = coreTracing.createSpanFunction({ - packagePrefix: "Azure.Storage.Blob", - namespace: "Microsoft.Storage", -}); -/** - * @internal - * - * Adapt the tracing options from OperationOptions to what they need to be for - * RequestOptionsBase (when we update to later OpenTelemetry versions this is now - * two separate fields, not just one). - */ -function convertTracingToRequestOptionsBase(options) { - var _a, _b; - return { - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. - spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, - tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext, - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting - * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all - * the values are set, this should be serialized with toString and set as the permissions field on a - * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but - * the order of the permissions is particular and this class guarantees correctness. - */ -class BlobSASPermissions { - constructor() { - /** - * Specifies Read access granted. - */ - this.read = false; - /** - * Specifies Add access granted. - */ - this.add = false; - /** - * Specifies Create access granted. - */ - this.create = false; - /** - * Specifies Write access granted. - */ - this.write = false; - /** - * Specifies Delete access granted. - */ - this.delete = false; - /** - * Specifies Delete version access granted. - */ - this.deleteVersion = false; - /** - * Specfies Tag access granted. - */ - this.tag = false; - /** - * Specifies Move access granted. - */ - this.move = false; - /** - * Specifies Execute access granted. - */ - this.execute = false; - /** - * Specifies SetImmutabilityPolicy access granted. - */ - this.setImmutabilityPolicy = false; - /** - * Specifies that Permanent Delete is permitted. - */ - this.permanentDelete = false; + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; } /** - * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. 
* - * @param permissions - + * @readonly */ - static parse(permissions) { - const blobSASPermissions = new BlobSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - blobSASPermissions.read = true; - break; - case "a": - blobSASPermissions.add = true; - break; - case "c": - blobSASPermissions.create = true; - break; - case "w": - blobSASPermissions.write = true; - break; - case "d": - blobSASPermissions.delete = true; - break; - case "x": - blobSASPermissions.deleteVersion = true; - break; - case "t": - blobSASPermissions.tag = true; - break; - case "m": - blobSASPermissions.move = true; - break; - case "e": - blobSASPermissions.execute = true; - break; - case "i": - blobSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - blobSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission: ${char}`); - } - } - return blobSASPermissions; + get requestId() { + return this.originalResponse.requestId; } /** - * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. * - * @param permissionLike - + * @readonly */ - static from(permissionLike) { - const blobSASPermissions = new BlobSASPermissions(); - if (permissionLike.read) { - blobSASPermissions.read = true; - } - if (permissionLike.add) { - blobSASPermissions.add = true; - } - if (permissionLike.create) { - blobSASPermissions.create = true; - } - if (permissionLike.write) { - blobSASPermissions.write = true; - } - if (permissionLike.delete) { - blobSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - blobSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - blobSASPermissions.tag = true; - } - if (permissionLike.move) { - blobSASPermissions.move = true; - } - if (permissionLike.execute) { - blobSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - blobSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - blobSASPermissions.permanentDelete = true; - } - return blobSASPermissions; + get clientRequestId() { + return this.originalResponse.clientRequestId; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. + * Indicates the version of the Blob service used + * to execute the request. * - * @returns A string which represents the BlobSASPermissions + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - return permissions.join(""); + get version() { + return this.originalResponse.version; } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. 
- * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. - * Once all the values are set, this should be serialized with toString and set as the permissions field on a - * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but - * the order of the permissions is particular and this class guarantees correctness. - */ -class ContainerSASPermissions { - constructor() { - /** - * Specifies Read access granted. - */ - this.read = false; - /** - * Specifies Add access granted. - */ - this.add = false; - /** - * Specifies Create access granted. - */ - this.create = false; - /** - * Specifies Write access granted. - */ - this.write = false; - /** - * Specifies Delete access granted. - */ - this.delete = false; - /** - * Specifies Delete version access granted. - */ - this.deleteVersion = false; - /** - * Specifies List access granted. - */ - this.list = false; - /** - * Specfies Tag access granted. - */ - this.tag = false; - /** - * Specifies Move access granted. - */ - this.move = false; - /** - * Specifies Execute access granted. - */ - this.execute = false; - /** - * Specifies SetImmutabilityPolicy access granted. - */ - this.setImmutabilityPolicy = false; - /** - * Specifies that Permanent Delete is permitted. - */ - this.permanentDelete = false; - /** - * Specifies that Filter Blobs by Tags is permitted. - */ - this.filterByTags = false; + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; } /** - * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * Indicates whether version of this blob is a current version. * - * @param permissions - + * @readonly */ - static parse(permissions) { - const containerSASPermissions = new ContainerSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - containerSASPermissions.read = true; - break; - case "a": - containerSASPermissions.add = true; - break; - case "c": - containerSASPermissions.create = true; - break; - case "w": - containerSASPermissions.write = true; - break; - case "d": - containerSASPermissions.delete = true; - break; - case "l": - containerSASPermissions.list = true; - break; - case "t": - containerSASPermissions.tag = true; - break; - case "x": - containerSASPermissions.deleteVersion = true; - break; - case "m": - containerSASPermissions.move = true; - break; - case "e": - containerSASPermissions.execute = true; - break; - case "i": - containerSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - containerSASPermissions.permanentDelete = true; - break; - case "f": - containerSASPermissions.filterByTags = true; - break; - default: - throw new RangeError(`Invalid permission ${char}`); - } - } - return containerSASPermissions; + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; } /** - * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. 
* - * @param permissionLike - + * @readonly */ - static from(permissionLike) { - const containerSASPermissions = new ContainerSASPermissions(); - if (permissionLike.read) { - containerSASPermissions.read = true; - } - if (permissionLike.add) { - containerSASPermissions.add = true; - } - if (permissionLike.create) { - containerSASPermissions.create = true; - } - if (permissionLike.write) { - containerSASPermissions.write = true; - } - if (permissionLike.delete) { - containerSASPermissions.delete = true; - } - if (permissionLike.list) { - containerSASPermissions.list = true; - } - if (permissionLike.deleteVersion) { - containerSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - containerSASPermissions.tag = true; - } - if (permissionLike.move) { - containerSASPermissions.move = true; - } - if (permissionLike.execute) { - containerSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - containerSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - containerSASPermissions.permanentDelete = true; - } - if (permissionLike.filterByTags) { - containerSASPermissions.filterByTags = true; - } - return containerSASPermissions; + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. * - * The order of the characters should be as specified here to ensure correctness. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. * + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.list) { - permissions.push("l"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - if (this.filterByTags) { - permissions.push("f"); - } - return permissions.join(""); + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * UserDelegationKeyCredential is only used for generation of user delegation SAS. 
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas - */ -class UserDelegationKeyCredential { /** - * Creates an instance of UserDelegationKeyCredential. - * @param accountName - - * @param userDelegationKey - + * If this blob has been sealed. + * + * @readonly */ - constructor(accountName, userDelegationKey) { - this.accountName = accountName; - this.userDelegationKey = userDelegationKey; - this.key = Buffer.from(userDelegationKey.value, "base64"); + get isSealed() { + return this.originalResponse.isSealed; } /** - * Generates a hash signature for an HTTP request or for a SAS. + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. * - * @param stringToSign - + * @readonly */ - computeHMACSHA256(stringToSign) { - // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); - return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Generate SasIPRange format string. For example: - * - * "8.8.8.8" or "1.1.1.1-255.255.255.255" - * - * @param ipRange - - */ -function ipRangeToString(ipRange) { - return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; -} - -// Copyright (c) Microsoft Corporation. -/** - * Protocols for generated SAS. - */ -exports.SASProtocol = void 0; -(function (SASProtocol) { /** - * Protocol that allows HTTPS only + * Indicates immutability policy mode. + * + * @readonly */ - SASProtocol["Https"] = "https"; + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } /** - * Protocol that allows both HTTPS and HTTP + * Indicates if a legal hold is present on the blob. + * + * @readonly */ - SASProtocol["HttpsAndHttp"] = "https,http"; -})(exports.SASProtocol || (exports.SASProtocol = {})); -/** - * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly - * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} - * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should - * be taken here in case there are existing query parameters, which might affect the appropriate means of appending - * these query parameters). - * - * NOTE: Instances of this class are immutable. 
- */ -class SASQueryParameters { - constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { - this.version = version; - this.signature = signature; - if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { - // SASQueryParametersOptions - this.permissions = permissionsOrOptions.permissions; - this.services = permissionsOrOptions.services; - this.resourceTypes = permissionsOrOptions.resourceTypes; - this.protocol = permissionsOrOptions.protocol; - this.startsOn = permissionsOrOptions.startsOn; - this.expiresOn = permissionsOrOptions.expiresOn; - this.ipRangeInner = permissionsOrOptions.ipRange; - this.identifier = permissionsOrOptions.identifier; - this.encryptionScope = permissionsOrOptions.encryptionScope; - this.resource = permissionsOrOptions.resource; - this.cacheControl = permissionsOrOptions.cacheControl; - this.contentDisposition = permissionsOrOptions.contentDisposition; - this.contentEncoding = permissionsOrOptions.contentEncoding; - this.contentLanguage = permissionsOrOptions.contentLanguage; - this.contentType = permissionsOrOptions.contentType; - if (permissionsOrOptions.userDelegationKey) { - this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; - this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; - this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; - this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; - this.signedService = permissionsOrOptions.userDelegationKey.signedService; - this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; - this.correlationId = permissionsOrOptions.correlationId; - } - } - else { - this.services = services; - this.resourceTypes = resourceTypes; - this.expiresOn = expiresOn; - this.permissions = permissionsOrOptions; - this.protocol = protocol; - this.startsOn = startsOn; - this.ipRangeInner = ipRange; - this.encryptionScope = encryptionScope; - this.identifier = identifier; - this.resource = resource; - this.cacheControl = cacheControl; - this.contentDisposition = contentDisposition; - this.contentEncoding = contentEncoding; - this.contentLanguage = contentLanguage; - this.contentType = contentType; - if (userDelegationKey) { - this.signedOid = userDelegationKey.signedObjectId; - this.signedTenantId = userDelegationKey.signedTenantId; - this.signedStartsOn = userDelegationKey.signedStartsOn; - this.signedExpiresOn = userDelegationKey.signedExpiresOn; - this.signedService = userDelegationKey.signedService; - this.signedVersion = userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; - this.correlationId = correlationId; - } - } + get legalHold() { + return this.originalResponse.legalHold; } /** - * Optional. IP range allowed for this SAS. + * The response body as a browser Blob. + * Always undefined in node.js. * * @readonly */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start, - }; - } - return undefined; + get contentAsBlob() { + return this.originalResponse.blobBody; } /** - * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. * + * @readonly */ - toString() { - const params = [ - "sv", - "ss", - "srt", - "spr", - "st", - "se", - "sip", - "si", - "ses", - "skoid", - "sktid", - "skt", - "ske", - "sks", - "skv", - "sr", - "sp", - "sig", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "saoid", - "scid", - ]; - const queries = []; - for (const param of params) { - switch (param) { - case "sv": - this.tryAppendQueryParameter(queries, param, this.version); - break; - case "ss": - this.tryAppendQueryParameter(queries, param, this.services); - break; - case "srt": - this.tryAppendQueryParameter(queries, param, this.resourceTypes); - break; - case "spr": - this.tryAppendQueryParameter(queries, param, this.protocol); - break; - case "st": - this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); - break; - case "se": - this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); - break; - case "sip": - this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); - break; - case "si": - this.tryAppendQueryParameter(queries, param, this.identifier); - break; - case "ses": - this.tryAppendQueryParameter(queries, param, this.encryptionScope); - break; - case "skoid": // Signed object ID - this.tryAppendQueryParameter(queries, param, this.signedOid); - break; - case "sktid": // Signed tenant ID - this.tryAppendQueryParameter(queries, param, this.signedTenantId); - break; - case "skt": // Signed key start time - this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); - break; - case "ske": // Signed key expiry time - this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); - break; - case "sks": // Signed key service - this.tryAppendQueryParameter(queries, param, this.signedService); - break; - case "skv": // Signed key version - this.tryAppendQueryParameter(queries, param, this.signedVersion); - break; - case "sr": - this.tryAppendQueryParameter(queries, param, this.resource); - break; - case "sp": - this.tryAppendQueryParameter(queries, param, this.permissions); - break; - case "sig": - this.tryAppendQueryParameter(queries, param, this.signature); - break; - case "rscc": - this.tryAppendQueryParameter(queries, param, this.cacheControl); - break; - case "rscd": - this.tryAppendQueryParameter(queries, param, this.contentDisposition); - break; - case "rsce": - this.tryAppendQueryParameter(queries, param, this.contentEncoding); - break; - case "rscl": - this.tryAppendQueryParameter(queries, param, this.contentLanguage); - break; - case "rsct": - this.tryAppendQueryParameter(queries, param, this.contentType); - break; - case "saoid": - this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); - break; - case "scid": - this.tryAppendQueryParameter(queries, param, this.correlationId); - break; - } - } - return queries.join("&"); + get readableStreamBody() { + return coreUtil.isNode ? this.blobDownloadStream : undefined; } /** - * A private helper method used to filter and append query key/value pairs into an array. + * The HTTP response. 
+ */ + get _response() { + return this.originalResponse._response; + } + /** + * Creates an instance of BlobDownloadResponse. * - * @param queries - - * @param key - - * @param value - + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - */ - tryAppendQueryParameter(queries, key, value) { - if (!value) { - return; - } - key = encodeURIComponent(key); - value = encodeURIComponent(value); - if (key.length > 0 && value.length > 0) { - queries.push(`${key}=${value}`); - } + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); } } // Copyright (c) Microsoft Corporation. -function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; - const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential - ? sharedKeyCredentialOrUserDelegationKey - : undefined; - let userDelegationKeyCredential; - if (sharedKeyCredential === undefined && accountName !== undefined) { - userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); +// Licensed under the MIT License. +const AVRO_SYNC_MARKER_SIZE = 16; +const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +const AVRO_CODEC_KEY = "avro.codec"; +const AVRO_SCHEMA_KEY = "avro.schema"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; } - if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { - throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; } - // Version 2020-12-06 adds support for encryptionscope in SAS. - if (version >= "2020-12-06") { - if (sharedKeyCredential !== undefined) { - return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + // int and long are stored in variable-length zig-zag coding. 
+ // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; } else { - return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + throw new Error("Byte was not a boolean."); } } - // Version 2019-12-12 adds support for the blob tags permission. - // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. - // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string - if (version >= "2018-11-09") { - if (sharedKeyCredential !== undefined) { - return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); } - else { - // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. 
- if (version >= "2020-02-10") { - return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. + const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; } - else { - return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); } } + return items; } - if (version >= "2015-04-05") { - if (sharedKeyCredential !== undefined) { - return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); } else { - throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + return AvroType.fromObjectSchema(schema); } } - throw new RangeError("'version' must be >= '2015-04-05'."); -} -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn and identifier. - * - * WARNING: When identifier is not provided, permissions and expiresOn are required. 
- * You MUST assign value to identifier or expiresOn & permissions manually if you initial with - * this constructor. - * - * @param blobSASSignatureValues - - * @param sharedKeyCredential - - */ -function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && - !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } } - let resource = "c"; - if (blobSASSignatureValues.blobName) { - resource = "b"; + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + catch (_a) { + // eslint-disable-line no-empty + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); } } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn and identifier. - * - * WARNING: When identifier is not provided, permissions and expiresOn are required. - * You MUST assign value to identifier or expiresOn & permissions manually if you initial with - * this constructor. 
- * - * @param blobSASSignatureValues - - * @param sharedKeyCredential - - */ -function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && - !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); } } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? 
blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn and identifier. - * - * WARNING: When identifier is not provided, permissions and expiresOn are required. - * You MUST assign value to identifier or expiresOn & permissions manually if you initial with - * this constructor. - * - * @param blobSASSignatureValues - - * @param sharedKeyCredential - - */ -function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && - !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
- let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } } + return record; } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); } -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn. - * - * WARNING: identifier will be ignored, permissions and expiresOn are required. 
- * - * @param blobSASSignatureValues - - * @param userDelegationKeyCredential - - */ -function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - // Stored access policies are not supported for a user delegation SAS. - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +function arraysEqual(a, b) { + if (a === b) + return true; + // eslint-disable-next-line eqeqeq + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; + return true; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +class AvroReader { + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); } - } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + // The 16-byte, randomly-generated sync marker for this file. 
+ this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } } } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType, - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects() { + return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { + if (!this._initialized) { + yield tslib.__await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (_a) { + // We hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield tslib.__await(result); + } + }); + } } -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn. - * - * WARNING: identifier will be ignored, permissions and expiresOn are required. - * - * @param blobSASSignatureValues - - * @param userDelegationKeyCredential - - */ -function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - // Stored access policies are not supported for a user delegation SAS. - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +class AvroReadable { +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +class AvroReadableFromStream extends AvroReadable { + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); } - } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return this.toUint8Array(chunk); } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. + resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); } } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - undefined, - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType, - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn. - * - * WARNING: identifier will be ignored, permissions and expiresOn are required. * - * @param blobSASSignatureValues - - * @param userDelegationKeyCredential - + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. */ -function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - // Stored access policies are not supported for a user delegation SAS. 
- if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); - } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } +class BlobQuickQueryStream extends stream.Readable { + /** + * Creates an instance of BlobQuickQueryStream. + * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); } } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - undefined, - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType, - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); -} -function getCanonicalName(accountName, containerName, blobName) { - // Container: "/blob/account/containerName" - // Blob: "/blob/account/containerName/blobName" - const elements = [`/blob/${accountName}/${containerName}`]; - if (blobName) { - elements.push(`/${blobName}`); - } - return elements.join(""); -} -function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { - const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; - if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { - throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); - } - if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { - throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); - } - if (blobSASSignatureValues.versionId && version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); - } - if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { - throw RangeError("Must provide 'blobName' when providing 'versionId'."); - } - if (blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.setImmutabilityPolicy && - version < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); - } - if (blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.deleteVersion && - version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); - } - if (blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.permanentDelete && - version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); - } - if (blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.tag && - version < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); - } - if (version < "2020-02-10" && - blobSASSignatureValues.permissions && - (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { - throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); - } - if (version < "2021-04-10" && - blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.filterByTags) { - throw 
RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); - } - if (version < "2020-02-10" && - (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { - throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); - } - if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); } - blobSASSignatureValues.version = version; - return blobSASSignatureValues; } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. */ -class BlobLeaseClient { +class BlobQueryResponse { /** - * Creates an instance of BlobLeaseClient. - * @param client - The client to make the lease operation requests. - * @param leaseId - Initial proposed lease id. + * Indicates that the service supports + * requests for partial file content. 
+ * + * @readonly */ - constructor(client, leaseId) { - const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); - this._url = client.url; - if (client.name === undefined) { - this._isContainer = true; - this._containerOrBlobOperation = new Container(clientContext); - } - else { - this._isContainer = false; - this._containerOrBlobOperation = new Blob$1(clientContext); - } - if (!leaseId) { - leaseId = coreHttp.generateUuid(); - } - this._leaseId = leaseId; + get acceptRanges() { + return this.originalResponse.acceptRanges; } /** - * Gets the lease Id. + * Returns if it was previously specified + * for the file. * * @readonly */ - get leaseId() { - return this._leaseId; + get cacheControl() { + return this.originalResponse.cacheControl; } /** - * Gets the url. + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. * * @readonly */ - get url() { - return this._url; + get contentDisposition() { + return this.originalResponse.contentDisposition; } /** - * Establishes and manages a lock on a container for delete operations, or on a blob - * for write and delete operations. - * The lock duration can be 15 to 60 seconds, or can be infinite. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * Returns the value that was specified + * for the Content-Encoding request header. * - * @param duration - Must be between 15 to 60 seconds, or infinite (-1) - * @param options - option to configure lease management operations. - * @returns Response data for acquire lease operation. + * @readonly */ - async acquireLease(duration, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + get contentEncoding() { + return this.originalResponse.contentEncoding; } /** - * To change the ID of the lease. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * Returns the value that was specified + * for the Content-Language request header. * - * @param proposedLeaseId - the proposed new lease Id. 
- * @param options - option to configure lease management operations. - * @returns Response data for change lease operation. + * @readonly */ - async changeLease(proposedLeaseId, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - this._leaseId = proposedLeaseId; - return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + get contentLanguage() { + return this.originalResponse.contentLanguage; } /** - * To free the lease if it is no longer needed so that another client may - * immediately acquire a lease against the container or the blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. * - * @param options - option to configure lease management operations. - * @returns Response data for release lease operation. + * @readonly */ - async releaseLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; } /** - * To renew the lease. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. * - * @param options - Optional option to configure lease management operations. - * @returns Response data for renew lease operation. + * @readonly */ - async renewLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + get blobType() { + return this.originalResponse.blobType; } /** - * To end the lease but ensure that another client cannot acquire a new lease - * until the current lease period has expired. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * The number of bytes present in the + * response body. * - * @param breakPeriod - Break period - * @param options - Optional options to configure lease management operations. - * @returns Response data for break lease operation. + * @readonly */ - async breakLease(breakPeriod, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); - } - try { - const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); - return await this._containerOrBlobOperation.breakLease(operationOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + get contentLength() { + return this.originalResponse.contentLength; } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. - */ -class RetriableReadableStream extends stream.Readable { /** - * Creates an instance of RetriableReadableStream. - * - * @param source - The current ReadableStream returned from getter - * @param getter - A method calling downloading request returning - * a new ReadableStream from specified offset - * @param offset - Offset position in original data source to read - * @param count - How much data in original data source to read - * @param options - - */ - constructor(source, getter, offset, count, options = {}) { - super({ highWaterMark: options.highWaterMark }); - this.retries = 0; - this.sourceDataHandler = (data) => { - if (this.options.doInjectErrorOnce) { - this.options.doInjectErrorOnce = undefined; - this.source.pause(); - this.source.removeAllListeners("data"); - this.source.emit("end"); - return; - } - // console.log( - // `Offset: ${this.offset}, Received ${data.length} from internal stream` - // ); - this.offset += data.length; - if (this.onProgress) { - this.onProgress({ loadedBytes: this.offset - this.start }); - } - if (!this.push(data)) { - this.source.pause(); - } - }; - this.sourceErrorOrEndHandler = (err) => { - if (err && err.name === "AbortError") { - this.destroy(err); - return; - } - // console.log( - // `Source stream emits end or error, offset: ${ - // this.offset - // }, dest end : ${this.end}` - // ); - this.removeSourceEventHandlers(); - if (this.offset - 1 === this.end) { - this.push(null); - } - else if (this.offset <= this.end) { - // console.log( - // `retries: ${this.retries}, max retries: ${this.maxRetries}` - // ); - if (this.retries < this.maxRetryRequests) { - this.retries += 1; - this.getter(this.offset) - .then((newSource) => { - this.source = newSource; - this.setSourceEventHandlers(); - return; - }) - .catch((error) => { - this.destroy(error); - }); - } - else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); - } - } - else { - this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); - } - }; - this.getter = getter; - this.source = source; - this.start = offset; - this.offset = offset; - this.end = offset + count - 1; - this.maxRetryRequests = - options.maxRetryRequests && options.maxRetryRequests >= 0 ? 
options.maxRetryRequests : 0; - this.onProgress = options.onProgress; - this.options = options; - this.setSourceEventHandlers(); - } - _read() { - this.source.resume(); - } - setSourceEventHandlers() { - this.source.on("data", this.sourceDataHandler); - this.source.on("end", this.sourceErrorOrEndHandler); - this.source.on("error", this.sourceErrorOrEndHandler); - } - removeSourceEventHandlers() { - this.source.removeListener("data", this.sourceDataHandler); - this.source.removeListener("end", this.sourceErrorOrEndHandler); - this.source.removeListener("error", this.sourceErrorOrEndHandler); - } - _destroy(error, callback) { - // remove listener from source and release source - this.removeSourceEventHandlers(); - this.source.destroy(); - callback(error === null ? undefined : error); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will - * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot - * trigger retries defined in pipeline retry policy.) - * - * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js - * Readable stream. - */ -class BlobDownloadResponse { - /** - * Creates an instance of BlobDownloadResponse. - * - * @param originalResponse - - * @param getter - - * @param offset - - * @param count - - * @param options - - */ - constructor(originalResponse, getter, offset, count, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); - } - /** - * Indicates that the service supports - * requests for partial file content. - * - * @readonly - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; - } - /** - * Returns if it was previously specified - * for the file. - * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; - } - /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. - * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; - } - /** - * Returns the value that was specified - * for the Content-Encoding request header. - * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } - /** - * Returns the value that was specified - * for the Content-Language request header. - * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly - */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; - } - /** - * The number of bytes present in the - * response body. - * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; - } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. 
If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. * * @readonly */ @@ -93505,7 +86665,7 @@ class BlobDownloadResponse { * @readonly */ get copyCompletedOn() { - return this.originalResponse.copyCompletedOn; + return undefined; } /** * String identifier for the last attempted Copy @@ -93612,14 +86772,6 @@ class BlobDownloadResponse { get etag() { return this.originalResponse.etag; } - /** - * The number of tags associated with the blob - * - * @readonly - */ - get tagCount() { - return this.originalResponse.tagCount; - } /** * The error code. * @@ -93662,23 +86814,6 @@ class BlobDownloadResponse { get lastModified() { return this.originalResponse.lastModified; } - /** - * Returns the UTC date and time generated by the service that indicates the time at which the blob was - * last read or written to. - * - * @readonly - */ - get lastAccessed() { - return this.originalResponse.lastAccessed; - } - /** - * Returns the date and time the blob was created. - * - * @readonly - */ - get createdOn() { - return this.originalResponse.createdOn; - } /** * A name-value pair * to associate with a file storage object. @@ -93707,7 +86842,7 @@ class BlobDownloadResponse { return this.originalResponse.clientRequestId; } /** - * Indicates the version of the Blob service used + * Indicates the version of the File service used * to execute the request. * * @readonly @@ -93715,22 +86850,6 @@ class BlobDownloadResponse { get version() { return this.originalResponse.version; } - /** - * Indicates the versionId of the downloaded blob version. - * - * @readonly - */ - get versionId() { - return this.originalResponse.versionId; - } - /** - * Indicates whether version of this blob is a current version. - * - * @readonly - */ - get isCurrentVersion() { - return this.originalResponse.isCurrentVersion; - } /** * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned * when the blob was encrypted with a customer-provided key. @@ -93750,2017 +86869,2221 @@ class BlobDownloadResponse { return this.originalResponse.contentCrc64; } /** - * Object Replication Policy Id of the destination blob. + * The response body as a browser Blob. + * Always undefined in node.js. * * @readonly */ - get objectReplicationDestinationPolicyId() { - return this.originalResponse.objectReplicationDestinationPolicyId; + get blobBody() { + return undefined; } /** - * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. 
* * @readonly */ - get objectReplicationSourceProperties() { - return this.originalResponse.objectReplicationSourceProperties; + get readableStreamBody() { + return coreUtil.isNode ? this.blobDownloadStream : undefined; } /** - * If this blob has been sealed. - * - * @readonly + * The HTTP response. */ - get isSealed() { - return this.originalResponse.isSealed; + get _response() { + return this.originalResponse._response; } /** - * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * Creates an instance of BlobQueryResponse. * - * @readonly + * @param originalResponse - + * @param options - */ - get immutabilityPolicyExpiresOn() { - return this.originalResponse.immutabilityPolicyExpiresOn; + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; +(function (BlockBlobTier) { /** - * Indicates immutability policy mode. - * - * @readonly + * Optimized for storing data that is accessed frequently. */ - get immutabilityPolicyMode() { - return this.originalResponse.immutabilityPolicyMode; - } + BlockBlobTier["Hot"] = "Hot"; /** - * Indicates if a legal hold is present on the blob. - * - * @readonly + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. */ - get legalHold() { - return this.originalResponse.legalHold; - } + BlockBlobTier["Cool"] = "Cool"; /** - * The response body as a browser Blob. - * Always undefined in node.js. - * - * @readonly + * Optimized for storing data that is rarely accessed. */ - get contentAsBlob() { - return this.originalResponse.blobBody; - } + BlockBlobTier["Cold"] = "Cold"; /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. - * - * It will automatically retry when internal read stream unexpected ends. - * - * @readonly + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : undefined; - } + BlockBlobTier["Archive"] = "Archive"; +})(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; +(function (PremiumPageBlobTier) { /** - * The HTTP response. + * P4 Tier. */ - get _response() { - return this.originalResponse._response; + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. 
+ */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. + */ + PremiumPageBlobTier["P80"] = "P80"; +})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); +function toAccessTier(tier) { + if (tier === undefined) { + return undefined; } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). +} +function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } +} +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); +/** + * + * To get OAuth audience for a storage account for blob service. + */ +function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const AVRO_SYNC_MARKER_SIZE = 16; -const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); -const AVRO_CODEC_KEY = "avro.codec"; -const AVRO_SCHEMA_KEY = "avro.schema"; +// Licensed under the MIT License. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. + * @param response - Model PageBlob Range response + */ +function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -class AvroParser { - /** - * Reads a fixed number of bytes from the stream. - * - * @param stream - - * @param length - - * @param options - - */ - static async readFixedBytes(stream, length, options = {}) { - const bytes = await stream.read(length, { abortSignal: options.abortSignal }); - if (bytes.length !== length) { - throw new Error("Hit stream end."); +// Licensed under the MIT License. +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. 
+ * + * @hidden + */ +class BlobBeginCopyFromUrlPoller extends coreLro.Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; } - return bytes; - } - /** - * Reads a single byte from the stream. - * - * @param stream - - * @param options - - */ - static async readByte(stream, options = {}) { - const buf = await AvroParser.readFixedBytes(stream, 1, options); - return buf[0]; - } - // int and long are stored in variable-length zig-zag coding. - // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt - // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream, options = {}) { - let zigZagEncoded = 0; - let significanceInBit = 0; - let byte, haveMoreByte, significanceInFloat; - do { - byte = await AvroParser.readByte(stream, options); - haveMoreByte = byte & 0x80; - zigZagEncoded |= (byte & 0x7f) << significanceInBit; - significanceInBit += 7; - } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers - if (haveMoreByte) { - // Switch to float arithmetic - // eslint-disable-next-line no-self-assign - zigZagEncoded = zigZagEncoded; - significanceInFloat = 268435456; // 2 ** 28. - do { - byte = await AvroParser.readByte(stream, options); - zigZagEncoded += (byte & 0x7f) * significanceInFloat; - significanceInFloat *= 128; // 2 ** 7 - } while (byte & 0x80); - const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2; - if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { - throw new Error("Integer overflow."); - } - return res; + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); } - return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + this.intervalInMs = intervalInMs; } - static async readLong(stream, options = {}) { - return AvroParser.readZigZagLong(stream, options); + delay() { + return coreUtil.delay(this.intervalInMs); } - static async readInt(stream, options = {}) { - return AvroParser.readZigZagLong(stream, options); +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); } - static async readNull() { - return null; + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); } - static async readBoolean(stream, options = {}) { - const b = await AvroParser.readByte(stream, options); - if (b === 1) { - return true; + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. 
+ * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; } - else if (b === 0) { - return false; + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? + options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } } - else { - throw new Error("Byte was not a boolean."); + catch (err) { + state.error = err; + state.isCompleted = true; } } - static async readFloat(stream, options = {}) { - const u8arr = await AvroParser.readFixedBytes(stream, 4, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat32(0, true); // littleEndian = true + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); } - static async readDouble(stream, options = {}) { - const u8arr = await AvroParser.readFixedBytes(stream, 8, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat64(0, true); // littleEndian = true + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); } - static async readBytes(stream, options = {}) { - const size = await AvroParser.readLong(stream, options); - if (size < 0) { - throw new Error("Bytes size was negative."); + return iRange.count + ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+// In browser, during webpack or browserify bundling, this module will be replaced by 'events' +// https://github.com/Gozala/events +/** + * States for Batch. + */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. + */ +class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); } - return stream.read(size, { abortSignal: options.abortSignal }); + this.concurrency = concurrency; + this.emitter = new events.EventEmitter(); } - static async readString(stream, options = {}) { - const u8arr = await AvroParser.readBytes(stream, options); - const utf8decoder = new TextDecoder(); - return utf8decoder.decode(u8arr); + /** + * Add a operation into queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); } - static async readMapPair(stream, readItemMethod, options = {}) { - const key = await AvroParser.readString(stream, options); - // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. - const value = await readItemMethod(stream, options); - return { key, value }; + /** + * Start execute operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); } - static async readMap(stream, readItemMethod, options = {}) { - const readPairMethod = (s, opts = {}) => { - return AvroParser.readMapPair(s, readItemMethod, opts); - }; - const pairs = await AvroParser.readArray(stream, readPairMethod, options); - const dict = {}; - for (const pair of pairs) { - dict[pair.key] = pair.value; + /** + * Get next operation to be executed. Return null when reaching ends. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; } - return dict; + return null; } - static async readArray(stream, readItemMethod, options = {}) { - const items = []; - for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { - if (count < 0) { - // Ignore block sizes - await AvroParser.readLong(stream, options); - count = -count; + /** + * Start execute operations. 
One one the most important difference between + * this method with do() is that do() wraps as an sync method. + * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); } - while (count--) { - const item = await readItemMethod(stream, options); - items.push(item); + else { + return; } } - return items; } } -var AvroComplex; -(function (AvroComplex) { - AvroComplex["RECORD"] = "record"; - AvroComplex["ENUM"] = "enum"; - AvroComplex["ARRAY"] = "array"; - AvroComplex["MAP"] = "map"; - AvroComplex["UNION"] = "union"; - AvroComplex["FIXED"] = "fixed"; -})(AvroComplex || (AvroComplex = {})); -var AvroPrimitive; -(function (AvroPrimitive) { - AvroPrimitive["NULL"] = "null"; - AvroPrimitive["BOOLEAN"] = "boolean"; - AvroPrimitive["INT"] = "int"; - AvroPrimitive["LONG"] = "long"; - AvroPrimitive["FLOAT"] = "float"; - AvroPrimitive["DOUBLE"] = "double"; - AvroPrimitive["BYTES"] = "bytes"; - AvroPrimitive["STRING"] = "string"; -})(AvroPrimitive || (AvroPrimitive = {})); -class AvroType { + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * This class generates a readable stream from the data in an array of buffers. + */ +class BuffersStream extends stream.Readable { /** - * Determines the AvroType from the Avro Schema. + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. + * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers */ - static fromSchema(schema) { - if (typeof schema === "string") { - return AvroType.fromStringSchema(schema); - } - else if (Array.isArray(schema)) { - return AvroType.fromArraySchema(schema); - } - else { - return AvroType.fromObjectSchema(schema); + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; } - } - static fromStringSchema(schema) { - switch (schema) { - case AvroPrimitive.NULL: - case AvroPrimitive.BOOLEAN: - case AvroPrimitive.INT: - case AvroPrimitive.LONG: - case AvroPrimitive.FLOAT: - case AvroPrimitive.DOUBLE: - case AvroPrimitive.BYTES: - case AvroPrimitive.STRING: - return new AvroPrimitiveType(schema); - default: - throw new Error(`Unexpected Avro type ${schema}`); + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); } } - static fromArraySchema(schema) { - return new AvroUnionType(schema.map(AvroType.fromSchema)); - } - static fromObjectSchema(schema) { - const type = schema.type; - // Primitives can be defined as strings or objects - try { - return AvroType.fromStringSchema(type); + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. 
The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); } - catch (err) { - // eslint-disable-line no-empty + if (!size) { + size = this.readableHighWaterMark; } - switch (type) { - case AvroComplex.RECORD: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.name) { - throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); - } - // eslint-disable-next-line no-case-declarations - const fields = {}; - if (!schema.fields) { - throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); - } - for (const field of schema.fields) { - fields[field.name] = AvroType.fromSchema(field.type); - } - return new AvroRecordType(fields, schema.name); - case AvroComplex.ENUM: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.symbols) { - throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; } - return new AvroEnumType(schema.symbols); - case AvroComplex.MAP: - if (!schema.values) { - throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + else { + this.byteOffsetInCurrentBuffer = end; } - return new AvroMapType(AvroType.fromSchema(schema.values)); - case AvroComplex.ARRAY: // Unused today - case AvroComplex.FIXED: // Unused today - default: - throw new Error(`Unexpected Avro type ${type} in ${schema}`); + this.pushedBytesLength += remaining; + i += remaining; + } } - } -} -class AvroPrimitiveType extends AvroType { - constructor(primitive) { - super(); - this._primitive = primitive; - } - read(stream, options = {}) { - switch (this._primitive) { - case AvroPrimitive.NULL: - return AvroParser.readNull(); - case AvroPrimitive.BOOLEAN: - return AvroParser.readBoolean(stream, options); - case AvroPrimitive.INT: - return AvroParser.readInt(stream, options); - case AvroPrimitive.LONG: - return AvroParser.readLong(stream, options); - case AvroPrimitive.FLOAT: - return AvroParser.readFloat(stream, options); - case AvroPrimitive.DOUBLE: - return AvroParser.readDouble(stream, options); - case AvroPrimitive.BYTES: - return AvroParser.readBytes(stream, options); - case AvroPrimitive.STRING: - return AvroParser.readString(stream, options); - default: - throw new Error("Unknown Avro Primitive"); + if (outBuffers.length > 1) { 
+ this.push(Buffer.concat(outBuffers)); } - } -} -class AvroEnumType extends AvroType { - constructor(symbols) { - super(); - this._symbols = symbols; - } - async read(stream, options = {}) { - const value = await AvroParser.readInt(stream, options); - return this._symbols[value]; - } -} -class AvroUnionType extends AvroType { - constructor(types) { - super(); - this._types = types; - } - async read(stream, options = {}) { - // eslint-disable-line @typescript-eslint/ban-types - const typeIndex = await AvroParser.readInt(stream, options); - return this._types[typeIndex].read(stream, options); - } -} -class AvroMapType extends AvroType { - constructor(itemType) { - super(); - this._itemType = itemType; - } - read(stream, options = {}) { - const readItemMethod = (s, opts) => { - return this._itemType.read(s, opts); - }; - return AvroParser.readMap(stream, readItemMethod, options); - } -} -class AvroRecordType extends AvroType { - constructor(fields, name) { - super(); - this._fields = fields; - this._name = name; - } - async read(stream, options = {}) { - const record = {}; - record["$schema"] = this._name; - for (const key in this._fields) { - if (Object.prototype.hasOwnProperty.call(this._fields, key)) { - record[key] = await this._fields[key].read(stream, options); - } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); } - return record; } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function arraysEqual(a, b) { - if (a === b) - return true; - // eslint-disable-next-line eqeqeq - if (a == null || b == null) - return false; - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; ++i) { - if (a[i] !== b[i]) - return false; - } - return true; -} - -// Copyright (c) Microsoft Corporation. -class AvroReader { - constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { - this._dataStream = dataStream; - this._headerStream = headerStream || dataStream; - this._initialized = false; - this._blockOffset = currentBlockOffset || 0; - this._objectIndex = indexWithinCurrentBlock || 0; - this._initialBlockOffset = currentBlockOffset || 0; - } - get blockOffset() { - return this._blockOffset; - } - get objectIndex() { - return this._objectIndex; +// Licensed under the MIT License. +const maxBufferLength = buffer.constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +class PooledBuffer { + /** + * The size of the data contained in the pooled buffers. 
+ */ + get size() { + return this._size; } - async initialize(options = {}) { - const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal, - }); - if (!arraysEqual(header, AVRO_INIT_BYTES)) { - throw new Error("Stream is not an Avro file."); - } - // File metadata is written as if defined by the following map schema: - // { "type": "map", "values": "bytes"} - this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal, - }); - // Validate codec - const codec = this._metadata[AVRO_CODEC_KEY]; - if (!(codec === undefined || codec === null || codec === "null")) { - throw new Error("Codecs are not supported"); - } - // The 16-byte, randomly-generated sync marker for this file. - this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal, - }); - // Parse the schema - const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); - this._itemType = AvroType.fromSchema(schema); - if (this._blockOffset === 0) { - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - } - this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal, - }); - // skip block length - await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); - this._initialized = true; - if (this._objectIndex && this._objectIndex > 0) { - for (let i = 0; i < this._objectIndex; i++) { - await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); - this._itemsRemainingInBlock--; + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. + */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); } } - hasNext() { - return !this._initialized || this._itemsRemainingInBlock > 0; - } - parseObjects(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* parseObjects_1() { - if (!this._initialized) { - yield tslib.__await(this.initialize(options)); + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. 
+ * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; } - while (this.hasNext()) { - const result = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal, - })); - this._itemsRemainingInBlock--; - this._objectIndex++; - if (this._itemsRemainingInBlock === 0) { - const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal, - })); - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - this._objectIndex = 0; - if (!arraysEqual(this._syncMarker, marker)) { - throw new Error("Stream is not a valid Avro file."); - } - try { - this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal, - })); - } - catch (err) { - // We hit the end of the stream. - this._itemsRemainingInBlock = 0; - } - if (this._itemsRemainingInBlock > 0) { - // Ignore block size - yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); - } - } - yield yield tslib.__await(result); + if (targetOffset === target.length) { + j++; + targetOffset = 0; } - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -class AvroReadable { -} - -// Copyright (c) Microsoft Corporation. -const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); -class AvroReadableFromStream extends AvroReadable { - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; - } - toUint8Array(data) { - if (typeof data === "string") { - return Buffer.from(data); - } - return data; - } - get position() { - return this._position; - } - async read(size, options = {}) { - var _a; - if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw ABORT_ERROR; - } - if (size < 0) { - throw new Error(`size parameter should be positive: ${size}`); - } - if (size === 0) { - return new Uint8Array(); - } - if (!this._readable.readable) { - throw new Error("Stream no longer readable."); } - // See if there is already enough data. - const chunk = this._readable.read(size); - if (chunk) { - this._position += chunk.length; - // chunk.length maybe less than desired size if the stream ends. 
- return this.toUint8Array(chunk); - } - else { - // register callback to wait for enough data to read - return new Promise((resolve, reject) => { - /* eslint-disable @typescript-eslint/no-use-before-define */ - const cleanUp = () => { - this._readable.removeListener("readable", readableCallback); - this._readable.removeListener("error", rejectCallback); - this._readable.removeListener("end", rejectCallback); - this._readable.removeListener("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.removeEventListener("abort", abortHandler); - } - }; - const readableCallback = () => { - const callbackChunk = this._readable.read(size); - if (callbackChunk) { - this._position += callbackChunk.length; - cleanUp(); - // callbackChunk.length maybe less than desired size if the stream ends. - resolve(this.toUint8Array(callbackChunk)); - } - }; - const rejectCallback = () => { - cleanUp(); - reject(); - }; - const abortHandler = () => { - cleanUp(); - reject(ABORT_ERROR); - }; - this._readable.on("readable", readableCallback); - this._readable.once("error", rejectCallback); - this._readable.once("end", rejectCallback); - this._readable.once("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.addEventListener("abort", abortHandler); - } - /* eslint-enable @typescript-eslint/no-use-before-define */ - }); + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); } } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. - */ -class BlobQuickQueryStream extends stream.Readable { /** - * Creates an instance of BlobQuickQueryStream. + * Get the readable stream assembled from all the data in the internal buffers. 
* - * @param source - The current ReadableStream returned from getter - * @param options - */ - constructor(source, options = {}) { - super(); - this.avroPaused = true; - this.source = source; - this.onProgress = options.onProgress; - this.onError = options.onError; - this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); - this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); - } - _read() { - if (this.avroPaused) { - this.readInternal().catch((err) => { - this.emit("error", err); - }); - } - } - async readInternal() { - this.avroPaused = false; - let avroNext; - do { - avroNext = await this.avroIter.next(); - if (avroNext.done) { - break; - } - const obj = avroNext.value; - const schema = obj.$schema; - if (typeof schema !== "string") { - throw Error("Missing schema in avro record."); - } - switch (schema) { - case "com.microsoft.azure.storage.queryBlobContents.resultData": - { - const data = obj.data; - if (data instanceof Uint8Array === false) { - throw Error("Invalid data in avro result record."); - } - if (!this.push(Buffer.from(data))) { - this.avroPaused = true; - } - } - break; - case "com.microsoft.azure.storage.queryBlobContents.progress": - { - const bytesScanned = obj.bytesScanned; - if (typeof bytesScanned !== "number") { - throw Error("Invalid bytesScanned in avro progress record."); - } - if (this.onProgress) { - this.onProgress({ loadedBytes: bytesScanned }); - } - } - break; - case "com.microsoft.azure.storage.queryBlobContents.end": - if (this.onProgress) { - const totalBytes = obj.totalBytes; - if (typeof totalBytes !== "number") { - throw Error("Invalid totalBytes in avro end record."); - } - this.onProgress({ loadedBytes: totalBytes }); - } - this.push(null); - break; - case "com.microsoft.azure.storage.queryBlobContents.error": - if (this.onError) { - const fatal = obj.fatal; - if (typeof fatal !== "boolean") { - throw Error("Invalid fatal in avro error record."); - } - const name = obj.name; - if (typeof name !== "string") { - throw Error("Invalid name in avro error record."); - } - const description = obj.description; - if (typeof description !== "string") { - throw Error("Invalid description in avro error record."); - } - const position = obj.position; - if (typeof position !== "number") { - throw Error("Invalid position in avro error record."); - } - this.onError({ - position, - name, - isFatal: fatal, - description, - }); - } - break; - default: - throw Error(`Unknown schema ${schema} in avro progress record.`); - } - } while (!avroNext.done && !this.avroPaused); + getReadableStream() { + return new BuffersStream(this.buffers, this.size); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. * - * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will - * parse avor data returned by blob query. + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. 
+ * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. */ -class BlobQueryResponse { - /** - * Creates an instance of BlobQueryResponse. - * - * @param originalResponse - - * @param options - - */ - constructor(originalResponse, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); - } +class BufferScheduler { /** - * Indicates that the service supports - * requests for partial file content. + * Creates an instance of BufferScheduler. * - * @readonly + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream */ - get acceptRanges() { - return this.originalResponse.acceptRanges; + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. + */ + this.emitter = new events.EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. 
+ */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; } /** - * Returns if it was previously specified - * for the file. + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. * - * @readonly */ - get cacheControl() { - return this.originalResponse.cacheControl; + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); } /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. + * Insert a new data into unresolved array. * - * @readonly + * @param data - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; } /** - * Returns the value that was specified - * for the Content-Encoding request header. + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. * - * @readonly */ - get contentEncoding() { - return this.originalResponse.contentEncoding; + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; } /** - * Returns the value that was specified - * for the Content-Language request header. + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. 
+ * Return false when available buffers in incoming are not enough, else true. * - * @readonly + * @returns Return false when buffers in incoming are not enough, else true. */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; } /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. */ - get blobType() { - return this.originalResponse.blobType; + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); } /** - * The number of bytes present in the - * response body. + * Trigger a outgoing handler for a buffer shifted from outgoing. * - * @readonly + * @param buffer - */ - get contentLength() { - return this.originalResponse.contentLength; + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); } /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. + * Return buffer used by outgoing handler into incoming. * - * @readonly + * @param buffer - */ - get contentMD5() { - return this.originalResponse.contentMD5; + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Reads a readable stream into buffer. Fill the buffer from offset to end. 
+ * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream + */ +async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); + stream.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. + */ +async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. 
+ */ +class BlobClient extends StorageClient { /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. - * - * @readonly + * The name of the blob. */ - get contentRange() { - return this.originalResponse.contentRange; + get name() { + return this._name; } /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' - * - * @readonly + * The name of the storage container the blob is associated with. */ - get contentType() { - return this.originalResponse.contentType; + get containerName() { + return this._containerName; } - /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. - * - * @readonly - */ - get copyCompletedOn() { - return undefined; + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = this.storageClientContext.blob; + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); } /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. * - * @readonly + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp */ - get copyId() { - return this.originalResponse.copyId; + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); } /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. * - * @readonly + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. 
*/ - get copyProgress() { - return this.originalResponse.copyProgress; + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); } /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. + * Creates a AppendBlobClient object. * - * @readonly */ - get copySource() { - return this.originalResponse.copySource; + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); } /** - * State of the copy operation - * identified by 'x-ms-copy-id'. Possible values include: 'pending', - * 'success', 'aborted', 'failed' + * Creates a BlockBlobClient object. * - * @readonly */ - get copyStatus() { - return this.originalResponse.copyStatus; + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); } /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. + * Creates a PageBlobClient object. * - * @readonly */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); } /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. * - * @readonly - */ - get leaseDuration() { - return this.originalResponse.leaseDuration; - } - /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody * - * @readonly - */ - get leaseState() { - return this.originalResponse.leaseState; - } - /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob * - * @readonly - */ - get leaseStatus() { - return this.originalResponse.leaseStatus; - } - /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. * - * @readonly - */ - get date() { - return this.originalResponse.date; - } - /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. * - * @readonly - */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; - } - /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. + * Example usage (Node.js): * - * @readonly - */ - get etag() { - return this.originalResponse.etag; - } - /** - * The error code. 
+ * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); * - * @readonly - */ - get errorCode() { - return this.originalResponse.errorCode; - } - /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` * - * @readonly - */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; - } - /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. + * Example usage (browser): * - * @readonly - */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; - } - /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); * - * @readonly + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` */ - get lastModified() { - return this.originalResponse.lastModified; - } - /** - * A name-value pair - * to associate with a file storage object. - * - * @readonly - */ - get metadata() { - return this.originalResponse.metadata; + async download(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.download({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onDownloadProgress: coreUtil.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, + range: offset === 0 && !count ? 
[Generated build output: diff hunks for the compiled `dist/` bundle (ncc output, note `__nccwpck_require__`), regenerated as part of the dependency update. They move the vendored `@azure/storage-blob` client from `@azure/core-http` to `@azure/core-rest-pipeline`-based tracing and utilities, covering `BlobClient` operations (download with retry, exists, getProperties, delete/deleteIfExists, undelete, setHTTPHeaders, setMetadata, setTags/getTags, createSnapshot, beginCopyFromURL/abortCopyFromURL/syncCopyFromURL, setAccessTier, downloadToBuffer/downloadToFile, SAS generation, immutability policy, legal hold, getAccountInfo), `AppendBlobClient` (constructor, withSnapshot, create, createIfNotExists, seal, appendBlock, appendBlockFromURL), and internal helpers (BlockBlobTier/PremiumPageBlobTier enums, BlobBeginCopyFromUrlPoller, rangeToString, Batch, BuffersStream, PooledBuffer, BufferScheduler). The hunks are machine-generated compiler output rather than hand-edited source.]
- * - * @param stream - A Node.js Readable stream - * @param buffer - Buffer to be filled, length must greater than or equal to offset - * @param offset - From which position in the buffer to be filled, inclusive - * @param end - To which position in the buffer to be filled, exclusive - * @param encoding - Encoding of the Readable stream - */ -async function streamToBuffer(stream, buffer, offset, end, encoding) { - let pos = 0; // Position in stream - const count = end - offset; // Total amount of data needed in stream - return new Promise((resolve, reject) => { - const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); - stream.on("readable", () => { - if (pos >= count) { - clearTimeout(timeout); - resolve(); - return; - } - let chunk = stream.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - // How much data needed in this chunk - const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; - buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); - pos += chunkLength; - }); - stream.on("end", () => { - clearTimeout(timeout); - if (pos < count) { - reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); - } - resolve(); - }); - stream.on("error", (msg) => { - clearTimeout(timeout); - reject(msg); - }); - }); -} -/** - * Reads a readable stream into buffer entirely. - * - * @param stream - A Node.js Readable stream - * @param buffer - Buffer to be filled, length must greater than or equal to offset - * @param encoding - Encoding of the Readable stream - * @returns with the count of bytes read. - * @throws `RangeError` If buffer size is not big enough. - */ -async function streamToBuffer2(stream, buffer, encoding) { - let pos = 0; // Position in stream - const bufferSize = buffer.length; - return new Promise((resolve, reject) => { - stream.on("readable", () => { - let chunk = stream.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - if (pos + chunk.length > bufferSize) { - reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); - return; - } - buffer.fill(chunk, pos, pos + chunk.length); - pos += chunk.length; - }); - stream.on("end", () => { - resolve(pos); - }); - stream.on("error", reject); - }); -} -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. - * - * @param rs - The read stream. - * @param file - Destination file path. 
- */ -async function readStreamToLocalFile(rs, file) { - return new Promise((resolve, reject) => { - const ws = fs__namespace.createWriteStream(file); - rs.on("error", (err) => { - reject(err); - }); - ws.on("error", (err) => { - reject(err); + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + abortSignal: options.abortSignal, + sourceRange: rangeToString({ offset: sourceOffset, count }), + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + appendPositionAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); }); - ws.on("close", resolve); - rs.pipe(ws); - }); + } } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Promisified version of fs.stat(). - */ -const fsStat = util__namespace.promisify(fs__namespace.stat); -const fsCreateReadStream = fs__namespace.createReadStream; - -/** - * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, - * append blob, or page blob. + * BlockBlobClient defines a set of operations applicable to block blobs. */ -class BlobClient extends StorageClient { +class BlockBlobClient extends BlobClient { constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { - options = options || {}; + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
+ // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); let pipeline; let url; + options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { // (url: string, pipeline: Pipeline) url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; options = blobNameOrOptions; @@ -95785,11 +89108,11 @@ class BlobClient extends StorageClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } @@ -95812,988 +89135,516 @@ class BlobClient extends StorageClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - ({ blobName: this._name, containerName: this._containerName } = - this.getBlobAndContainerNamesFromUrl()); - this.blobContext = new Blob$1(this.storageClientContext); - this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); - this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); - } - /** - * The name of the blob. - */ - get name() { - return this._name; - } - /** - * The name of the storage container the blob is associated with. - */ - get containerName() { - return this._containerName; + this.blockBlobContext = this.storageClientContext.blockBlob; + this._blobContext = this.storageClientContext.blob; } /** - * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. * * @param snapshot - The snapshot timestamp. - * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ withSnapshot(snapshot) { - return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? 
undefined : snapshot), this.pipeline); } /** - * Creates a new BlobClient object pointing to a version of this blob. - * Provide "" will remove the versionId and return a Client to the base blob. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param versionId - The versionId. - * @returns A new BlobClient object pointing to the version of this blob. + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - */ - withVersion(versionId) { - return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + async query(query, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + if (!coreUtil.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._blobContext.query({ + abortSignal: options.abortSignal, + queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + }); } /** - * Creates a AppendBlobClient object. + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. 
+ * @returns Response data for the Block Blob Upload operation. * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` */ - getAppendBlobClient() { - return new AppendBlobClient(this.url, this.pipeline); + async upload(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.upload(contentLength, body, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Creates a BlockBlobClient object. + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. */ - getBlockBlobClient() { - return new BlockBlobClient(this.url, this.pipeline); + async syncUploadFromURL(sourceURL, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f; + return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? 
void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? void 0 : _f.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); + }); } /** - * Creates a PageBlobClient object. + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. */ - getPageBlobClient() { - return new PageBlobClient(this.url, this.pipeline); + async stageBlock(blockId, body, contentLength, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + requestOptions: { + onUploadProgress: options.onProgress, + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Reads or downloads a blob from the system, including its metadata and properties. - * You can also call Get Blob to read a snapshot. - * - * * In Node.js, data returns in a Readable stream readableStreamBody - * * In browsers, data returns in a promise blobBody - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. 
Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param offset - From which position of the blob to download, greater than or equal to 0 * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined - * @param options - Optional options to Blob Download operation. - * - * - * Example usage (Node.js): - * - * ```js - * // Download and convert a blob to a string - * const downloadBlockBlobResponse = await blobClient.download(); - * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); - * console.log("Downloaded blob content:", downloaded.toString()); - * - * async function streamToBuffer(readableStream) { - * return new Promise((resolve, reject) => { - * const chunks = []; - * readableStream.on("data", (data) => { - * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); - * }); - * readableStream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * readableStream.on("error", reject); - * }); - * } - * ``` - * - * Example usage (browser): - * - * ```js - * // Download and convert a blob to a string - * const downloadBlockBlobResponse = await blobClient.download(); - * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); - * console.log( - * "Downloaded blob content", - * downloaded - * ); + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. + */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list * - * async function blobToString(blob: Blob): Promise { - * const fileReader = new FileReader(); - * return new Promise((resolve, reject) => { - * fileReader.onloadend = (ev: any) => { - * resolve(ev.target!.result); - * }; - * fileReader.onerror = reject; - * fileReader.readAsText(blob); - * }); - * } - * ``` + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. 
*/ - async download(offset = 0, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; + async commitBlockList(blocks, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlobClient-download", options); - try { - const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream - }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - // Return browser response immediately - if (!coreHttp.isNode) { - return wrappedRes; - } - // We support retrying when download stream unexpected ends in Node.js runtime - // Following code shouldn't be bundled into browser build, however some - // bundlers may try to bundle following code and "FileReadResponse.ts". - // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" - // The config is in package.json "browser" field - if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { - // TODO: Default value or make it a required parameter? - options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; - } - if (res.contentLength === undefined) { - throw new RangeError(`File download response doesn't contain valid content length header`); - } - if (!res.etag) { - throw new RangeError(`File download response doesn't contain valid etag header`); - } - return new BlobDownloadResponse(wrappedRes, async (start) => { - var _a; - const updatedDownloadOptions = { - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ifMatch: options.conditions.ifMatch || res.etag, - ifModifiedSince: options.conditions.ifModifiedSince, - ifNoneMatch: options.conditions.ifNoneMatch, - ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions, - }, - range: rangeToString({ - count: offset + res.contentLength - start, - offset: start, - }), - rangeGetContentMD5: options.rangeGetContentMD5, - rangeGetContentCRC64: options.rangeGetContentCrc64, - snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey, - }; - // Debug purpose only - // console.log( - // `Read from internal stream, range: ${ - // updatedOptions.range - // }, options: ${JSON.stringify(updatedOptions)}` - // ); - return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; - }, offset, res.contentLength, { - maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress, - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks }, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Returns true if the Azure blob resource represented by this client exists; false otherwise. - * - * NOTE: use this function with care since an existing blob might be deleted by other clients or - * applications. Vice versa new blobs might be added by other clients or applications after this - * function completes. + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list * - * @param options - options to Exists operation. + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. */ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-exists", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - await this.getProperties({ + async getBlockList(listType, options = {}) { + return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blockBlobContext.getBlockList(listType, { abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - // Expected exception when checking blob existence - return false; + })); + if (!res.committedBlocks) { + res.committedBlocks = []; } - else if (e.statusCode === 409 && - (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || - e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { - // Expected exception when checking blob existence - return true; + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return res; + }); } + // High level functions /** - * Returns all user-defined metadata, standard HTTP properties, and system properties - * for the blob. It does not return the content of the blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which - * will retain their original casing. + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. * - * @param options - Optional options to Get Properties operation. + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - */ - async getProperties(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); - try { - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async uploadData(data, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { + if (coreUtil.isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + }); } /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * ONLY AVAILABLE IN BROWSERS. * - * @param options - Optional options to Blob Delete operation. + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. */ - async delete(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-delete", options); - options.conditions = options.conditions || {}; - try { - return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async uploadBrowserData(browserData, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { + const browserBlob = new Blob([browserData]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + }); } /** - * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * - * @param options - Optional options to Blob Delete operation. + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. */ - async deleteIfExists(options = {}) { + async uploadSeekableInternal(bodyFactory, size, options = {}) { var _a, _b; - const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0; + if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); } - finally { - span.end(); - } - } - /** - * Restores the contents and metadata of soft deleted blob and any associated - * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 - * or later. 
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob - * - * @param options - Optional options to Blob Undelete operation. - */ - async undelete(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-undelete", options); - try { - return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Sets system properties on the blob. - * - * If no value provided, or no value provided for the specified blob HTTP headers, - * these blob HTTP headers without a value will be cleared. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties - * - * @param blobHTTPHeaders - If no value provided, or no value provided for - * the specified blob HTTP headers, these blob HTTP - * headers without a value will be cleared. - * A common header to set is `blobContentType` - * enabling the browser to provide functionality - * based on file type. - * @param options - Optional options to Blob Set HTTP Headers operation. - */ - async setHTTPHeaders(blobHTTPHeaders, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Sets user-defined metadata for the specified blob as one or more name-value pairs. - * - * If no option provided, or no metadata defined in the parameter, the blob - * metadata will be removed. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata - * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Optional options to Set Metadata operation. - */ - async setMetadata(metadata, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Sets tags on the underlying blob. - * A blob can have up to 10 tags. 
Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. - * Valid tag key and value characters include lower and upper case letters, digits (0-9), - * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). - * - * @param tags - - * @param options - - */ - async setTags(tags, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setTags", options); - try { - return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Gets the tags associated with the underlying blob. - * - * @param options - - */ - async getTags(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getTags", options); - try { - const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Get a {@link BlobLeaseClient} that manages leases on the blob. - * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the blob. - */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); - } - /** - * Creates a read-only snapshot of a blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob - * - * @param options - Optional options to the Blob Create Snapshot operation. - */ - async createSnapshot(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Asynchronously copies a blob to a destination within the storage account. 
- * This method returns a long running operation poller that allows you to wait - * indefinitely until the copy is completed. - * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. - * Note that the onProgress callback will not be invoked if the operation completes in the first - * request, and attempting to cancel a completed copy will result in an error being thrown. - * - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob - * - * Example using automatic polling: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using manual polling: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * while (!poller.isDone()) { - * await poller.poll(); - * } - * const result = copyPoller.getResult(); - * ``` - * - * Example using progress updates: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * onProgress(state) { - * console.log(`Progress: ${state.copyProgress}`); - * } - * }); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using a changing polling interval (default 15 seconds): - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * intervalInMs: 1000 // poll blob every 1 second for copy progress - * }); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using copy cancellation: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * // cancel operation after starting it. - * try { - * await copyPoller.cancelOperation(); - * // calls to get the result now throw PollerCancelledError - * await copyPoller.getResult(); - * } catch (err) { - * if (err.name === 'PollerCancelledError') { - * console.log('The copy was cancelled.'); - * } - * } - * ``` - * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. - */ - async beginCopyFromURL(copySource, options = {}) { - const client = { - abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), - getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args), - }; - const poller = new BlobBeginCopyFromUrlPoller({ - blobClient: client, - copySource, - intervalInMs: options.intervalInMs, - onProgress: options.onProgress, - resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options, - }); - // Trigger the startCopyFromURL call by calling poll. - // Any errors from this method should be surfaced to the user. - await poller.poll(); - return poller; - } - /** - * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero - * length and full metadata. Version 2012-02-12 and newer. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob - * - * @param copyId - Id of the Copy From URL operation. - * @param options - Optional options to the Blob Abort Copy From URL operation. 
- */ - async abortCopyFromURL(copyId, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); - try { - return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not - * return a response until the copy is complete. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url - * - * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication - * @param options - - */ - async syncCopyFromURL(copySource, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant - * storage only). A premium page blob's tier determines the allowed size, IOPS, - * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive - * storage type. This operation does not update the blob's ETag. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier - * - * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. - * @param options - Optional options to the Blob Set Tier operation. 
- */ - async setAccessTier(tier, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); - try { - return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - async downloadToBuffer(param1, param2, param3, param4 = {}) { - let buffer; - let offset = 0; - let count = 0; - let options = param4; - if (param1 instanceof Buffer) { - buffer = param1; - offset = param2 || 0; - count = typeof param3 === "number" ? param3 : 0; - } - else { - offset = typeof param1 === "number" ? param1 : 0; - count = typeof param2 === "number" ? param2 : 0; - options = param3 || {}; - } - const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); - try { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); - } - if (options.blockSize === 0) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); - } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); - } - if (!options.conditions) { - options.conditions = {}; + if (blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); } - // Customer doesn't specify length, get it - if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - count = response.contentLength - offset; - if (count < 0) { - throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + if (size > maxSingleShotSize) { + blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } } - // Allocate the buffer of size = count if the buffer is not provided - if (!buffer) { - try { - buffer = Buffer.alloc(count); - } - catch (error) { - throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); - } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + if (size <= maxSingleShotSize) { + return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); } - if (buffer.length < count) { - throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + const numBlocks = Math.floor((size - 1) / blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); } + const blockList = []; + const blockIDPrefix = coreUtil.randomUUID(); let transferProgress = 0; const batch = new Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + options.blockSize) { + for (let i = 0; i < numBlocks; i++) { batch.addOperation(async () => { - // Exclusive chunk end position - let chunkEnd = offset + count; - if (off + options.blockSize < chunkEnd) { - chunkEnd = off + options.blockSize; - } - const response = await this.download(off, chunkEnd - off, { + const blockID = generateBlockID(blockIDPrefix, i); + const start = blockSize * i; + const end = i === numBlocks - 1 ? size : start + blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { abortSignal: options.abortSignal, conditions: options.conditions, - maxRetryRequests: options.maxRetryRequestsPerBlock, - customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, }); - const stream = response.readableStreamBody; - await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); - // Update progress after block is downloaded, in case of block trying - // Could provide finer grained progress updating inside HTTP requests, - // only if convenience layer download try is enabled - transferProgress += chunkEnd - off; + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); + options.onProgress({ + loadedBytes: transferProgress, + }); } }); } await batch.do(); - return buffer; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return this.commitBlockList(blockList, updatedOptions); + }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * Downloads an Azure Blob to a local file. - * Fails if the the given file path already exits. - * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. - * - * @param filePath - - * @param offset - From which position of the block blob to download. - * @param count - How much data to be downloaded. Will download to the end when passing undefined. 
- * @param options - Options to Blob download options. - * @returns The response data for blob download operation, - * but with readableStreamBody set to undefined since its - * content is already read and written into a local file - * at the specified path. - */ - async downloadToFile(filePath, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); - try { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - if (response.readableStreamBody) { - await readStreamToLocalFile(response.readableStreamBody, filePath); - } - // The stream is no longer accessible so setting it to undefined. - response.blobDownloadStream = undefined; - return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - getBlobAndContainerNamesFromUrl() { - let containerName; - let blobName; - try { - // URL may look like the following - // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; - // "https://myaccount.blob.core.windows.net/mycontainer/blob"; - // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; - // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; - // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` - // http://localhost:10001/devstoreaccount1/containername/blob - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { - // "https://myaccount.blob.core.windows.net/containername/blob". - // .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } - else if (isIpEndpointStyle(parsedUrl)) { - // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob - // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob - // .getPath() -> /devstoreaccount1/containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); - containerName = pathComponents[2]; - blobName = pathComponents[4]; - } - else { - // "https://customdomain.com/containername/blob". - // .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } - // decode the encoded blobName, containerName - to get all the special characters that might be present in them - containerName = decodeURIComponent(containerName); - blobName = decodeURIComponent(blobName); - // Azure Storage Server will replace "\" with "/" in the blob names - // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName - blobName = blobName.replace(/\\/g, "/"); - if (!containerName) { - throw new Error("Provided containerName is invalid."); - } - return { blobName, containerName }; - } - catch (error) { - throw new Error("Unable to extract blobName and containerName with provided information."); - } - } - /** - * Asynchronously copies a blob to a destination within the storage account. 
- * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob - * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. - */ - async startCopyFromURL(copySource, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions, - }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Only available for BlobClient constructed with a shared key credential. - * - * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. + * Uploads a local file in blocks to a block blob. * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. 
*/ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + async uploadFile(filePath, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { + const size = (await fsStat(filePath)).size; + return this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Delete the immutablility policy on the blob. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param options - Optional options to delete immutability policy on the blob. - */ - async deleteImmutabilityPolicy(options) { - const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); - try { - return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Set immutablility policy on the blob. + * Uploads a Node.js Readable stream into block blob. * - * @param options - Optional options to set immutability policy on the blob. - */ - async setImmutabilityPolicy(immutabilityPolicy, options) { - const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); - try { - return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Set legal hold on the blob. + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. * - * @param options - Optional options to set legal hold on the blob. + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. 
*/ - async setLegalHold(legalHoldEnabled, options) { - const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); - try { - return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; } - finally { - span.end(); + if (!options.conditions) { + options.conditions = {}; } + return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { + let blockNum = 0; + const blockIDPrefix = coreUtil.randomUUID(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); + }); } } /** - * AppendBlobClient defines a set of operations applicable to append blobs. + * PageBlobClient defines a set of operations applicable to page blobs. */ -class AppendBlobClient extends BlobClient { +class PageBlobClient extends BlobClient { constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
/* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ @@ -96808,10 +89659,10 @@ class AppendBlobClient extends BlobClient { url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); @@ -96819,8 +89670,8 @@ class AppendBlobClient extends BlobClient { else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - url = urlOrConnectionString; // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && @@ -96832,11 +89683,11 @@ class AppendBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } @@ -96859,1214 +89710,291 @@ class AppendBlobClient extends BlobClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - this.appendBlobContext = new AppendBlob(this.storageClientContext); + this.pageBlobContext = this.storageClientContext.pageBlob; } /** - * Creates a new AppendBlobClient object identical to the source but with the + * Creates a new PageBlobClient object identical to the source but with the * specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. * * @param snapshot - The snapshot timestamp. - * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ withSnapshot(snapshot) { - return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? 
undefined : snapshot), this.pipeline);
     }
     /**
-     * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.
+     * Creates a page blob of the specified length. Call uploadPages to upload
+     * data to a page blob.
      * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
      *
-     * @param options - Options to the Append Block Create operation.
-     *
-     *
-     * Example usage:
-     *
-     * ```js
-     * const appendBlobClient = containerClient.getAppendBlobClient("");
-     * await appendBlobClient.create();
-     * ```
+     * @param size - size of the page blob.
+     * @param options - Options to the Page Blob Create operation.
+     * @returns Response data for the Page Blob Create operation.
      */
-    async create(options = {}) {
-        var _a, _b, _c;
-        const { span, updatedOptions } = createSpan("AppendBlobClient-create", options);
+    async create(size, options = {}) {
         options.conditions = options.conditions || {};
-        try {
-            ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
-            return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => {
+            var _a, _b, _c;
+            return assertResponse(await this.pageBlobContext.create(0, size, {
+                abortSignal: options.abortSignal,
+                blobHttpHeaders: options.blobHTTPHeaders,
+                blobSequenceNumber: options.blobSequenceNumber,
+                leaseAccessConditions: options.conditions,
+                metadata: options.metadata,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn,
+                immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode,
+                legalHold: options.legalHold,
+                tier: toAccessTier(options.tier),
+                blobTagsString: toBlobTagsString(options.tags),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
     }
     /**
-     * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.
-     * If the blob with the same name already exists, the content of the existing blob will remain unchanged.
+     * Creates a page blob of the specified length. Call uploadPages to upload
+     * data to a page blob. If the blob with the same name already exists, the content
+     * of the existing blob will remain unchanged.
* @see https://docs.microsoft.com/rest/api/storageservices/put-blob * + * @param size - size of the page blob. * @param options - */ - async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); - const conditions = { ifNoneMatch: ETagAny }; - try { - const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + async createIfNotExists(size, options = {}) { + return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** - * Seals the append blob, making it read only. + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page * - * @param options - + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. */ - async seal(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + async uploadPages(body, offset, count, options = {}) { options.conditions = options.conditions || {}; - try { - return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.uploadPages(count, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Commits a new block of data to the end of the existing append blob. - * @see https://docs.microsoft.com/rest/api/storageservices/append-block - * - * @param body - Data to be appended. - * @param contentLength - Length of the body in bytes. - * @param options - Options to the Append Block operation. - * - * - * Example usage: - * - * ```js - * const content = "Hello World!"; + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url * - * // Create a new append blob and append data to the blob. - * const newAppendBlobClient = containerClient.getAppendBlobClient(""); - * await newAppendBlobClient.create(); - * await newAppendBlobClient.appendBlock(content, content.length); + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { + abortSignal: options.abortSignal, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + sequenceNumberAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? 
void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page * - * // Append data to an existing append blob. - * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); - * await existingAppendBlobClient.appendBlock(content, content.length); - * ``` + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. */ - async appendBlock(body, contentLength, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + async clearPages(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress, - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.clearPages(0, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * The Append Block operation commits a new block of data to the end of an existing append blob - * where the contents are read from a source url. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param sourceURL - - * The url to the blob that will be the source of the copy. 
A source blob in the same storage account can - * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob - * must either be public or must be authenticated via a shared access signature. If the source blob is - * public, no authentication is required to perform the operation. - * @param sourceOffset - Offset in source to be appended - * @param count - Number of bytes to be appended as a block - * @param options - + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. */ - async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + async getPageRanges(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); } -} -/** - * BlockBlobClient defines a set of operations applicable to block blobs. - */ -class BlockBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, - // Legacy, no fix for eslint error without breaking. Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
- // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); - let pipeline; - let url; - options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - // (url: string, pipeline: Pipeline) - url = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || - credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - url = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } - else if (!credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName !== "string") { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - // The second parameter is undefined. Use anonymous credential. - url = urlOrConnectionString; - if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { - options = blobNameOrOptions; - } - pipeline = newPipeline(new AnonymousCredential(), options); - } - else if (credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName === "string" && - blobNameOrOptions && - typeof blobNameOrOptions === "string") { - // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } - else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } - else if (extractedCreds.kind === "SASConnString") { - url = - appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + - "?" + - extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } - else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. 
+ */ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + marker: marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItemSegments() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker, options = {}) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); } - } - else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); - } - super(url, pipeline); - this.blockBlobContext = new BlockBlob(this.storageClientContext); - this._blobContext = new Blob$1(this.storageClientContext); + }); } /** - * Creates a new BlockBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a URL to the base blob. + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * - * @param snapshot - The snapshot timestamp. - * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. */ - withSnapshot(snapshot) { - return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? 
undefined : snapshot), this.pipeline); + listPageRangeItems() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { + var _a, e_1, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_1) throw e_1.error; } + } + }); } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * Quick query for a JSON or CSV formatted blob. + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. * - * Example usage (Node.js): + * Example using `for await` syntax: * * ```js - * // Query and convert a blob to a string - * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); - * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); - * console.log("Query blob content:", downloaded); - * - * async function streamToBuffer(readableStream) { - * return new Promise((resolve, reject) => { - * const chunks = []; - * readableStream.on("data", (data) => { - * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); - * }); - * readableStream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * readableStream.on("error", reject); - * }); + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * ``` * - * @param query - - * @param options - - */ - async query(query, options = {}) { - var _a; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); - try { - if (!coreHttp.isNode) { - throw new Error("This operation currently is only supported in Node.js."); - } - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { - queryType: "SQL", - expression: query, - inputSerialization: toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration), - }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - return new BlobQueryResponse(response, { - abortSignal: options.abortSignal, - onProgress: options.onProgress, - onError: options.onError, - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Creates a new block blob, or updates the content of an existing block blob. - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link stageBlock} and {@link commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link uploadFile}, - * {@link uploadStream} or {@link uploadBrowserData} for better performance - * with concurrency uploading. - * - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob - * - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to the Block Blob Upload operation. - * @returns Response data for the Block Blob Upload operation. - * - * Example usage: - * - * ```js - * const content = "Hello world!"; - * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); - * ``` - */ - async upload(body, contentLength, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress, - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Creates a new Block Blob where the contents of the blob are read from a given URL. - * This API is supported beginning with the 2020-04-08 version. Partial updates - * are not supported with Put Blob from URL; the content of an existing blob is overwritten with - * the content of the new blob. To perform partial updates to a block blob’s contents using a - * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. 
- * - * @param sourceURL - Specifies the URL of the blob. The value - * may be a URL of up to 2 KB in length that specifies a blob. - * The value should be URL-encoded as it would appear - * in a request URI. The source blob must either be public - * or must be authenticated via a shared access signature. - * If the source blob is public, no authentication is required - * to perform the operation. Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Optional parameters. - */ - async syncUploadFromURL(sourceURL, options = {}) { - var _a, _b, _c, _d, _e; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, - sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, - sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, - sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions, - }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Uploads the specified block to the block blob's "staging area" to be later - * committed by a call to commitBlockList. - * @see https://docs.microsoft.com/rest/api/storageservices/put-block - * - * @param blockId - A 64-byte value that is base64-encoded - * @param body - Data to upload to the staging area. - * @param contentLength - Number of bytes to upload. - * @param options - Options to the Block Blob Stage Block operation. - * @returns Response data for the Block Blob Stage Block operation. 
- */ - async stageBlock(blockId, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { - onUploadProgress: options.onProgress, - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * The Stage Block From URL operation creates a new block to be committed as part - * of a blob where the contents are read from a URL. - * This API is available starting in version 2018-03-28. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url - * - * @param blockId - A 64-byte value that is base64-encoded - * @param sourceURL - Specifies the URL of the blob. The value - * may be a URL of up to 2 KB in length that specifies a blob. - * The value should be URL-encoded as it would appear - * in a request URI. The source blob must either be public - * or must be authenticated via a shared access signature. - * If the source blob is public, no authentication is required - * to perform the operation. Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param offset - From which position of the blob to download, greater than or equal to 0 - * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined - * @param options - Options to the Block Blob Stage Block From URL operation. - * @returns Response data for the Block Blob Stage Block From URL operation. - */ - async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Writes a blob by specifying the list of block IDs that make up the blob. - * In order to be written as part of a blob, a block must have been successfully written - * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to - * update a blob by uploading only those blocks that have changed, then committing the new and existing - * blocks together. 
Any blocks not specified in the block list and permanently deleted. - * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list - * - * @param blocks - Array of 64-byte value that is base64-encoded - * @param options - Options to the Block Blob Commit Block List operation. - * @returns Response data for the Block Blob Commit Block List operation. - */ - async commitBlockList(blocks, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Returns the list of blocks that have been uploaded as part of a block blob - * using the specified block list filter. - * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list - * - * @param listType - Specifies whether to return the list of committed blocks, - * the list of uncommitted blocks, or both lists together. - * @param options - Options to the Block Blob Get Block List operation. - * @returns Response data for the Block Blob Get Block List operation. - */ - async getBlockList(listType, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); - try { - const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - if (!res.committedBlocks) { - res.committedBlocks = []; - } - if (!res.uncommittedBlocks) { - res.uncommittedBlocks = []; - } - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - // High level functions - /** - * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. - * - * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. 
- * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. - * - * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView - * @param options - - */ - async uploadData(data, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); - try { - if (coreHttp.isNode) { - let buffer; - if (data instanceof Buffer) { - buffer = data; - } - else if (data instanceof ArrayBuffer) { - buffer = Buffer.from(data); - } - else { - data = data; - buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); - } - return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); - } - else { - const browserBlob = new Blob([data]); - return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * ONLY AVAILABLE IN BROWSERS. - * - * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. - * - * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call - * {@link commitBlockList} to commit the block list. - * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. - * - * @deprecated Use {@link uploadData} instead. - * - * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView - * @param options - Options to upload browser data. - * @returns Response data for the Blob Upload operation. - */ - async uploadBrowserData(browserData, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); - try { - const browserBlob = new Blob([browserData]); - return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * - * Uploads data to block blob. Requires a bodyFactory as the data source, - * which need to return a {@link HttpRequestBody} object with the offset and size provided. - * - * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. - * - * @param bodyFactory - - * @param size - size of the data to upload. - * @param options - Options to Upload to Block Blob operation. - * @returns Response data for the Blob Upload operation. 
- */ - async uploadSeekableInternal(bodyFactory, size, options = {}) { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { - throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); - } - if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { - options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - } - if (options.maxSingleShotSize < 0 || - options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { - throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); - } - if (options.blockSize === 0) { - if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`${size} is too larger to upload to a block blob.`); - } - if (size > options.maxSingleShotSize) { - options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); - if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - } - } - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); - try { - if (size <= options.maxSingleShotSize) { - return await this.upload(bodyFactory(0, size), size, updatedOptions); - } - const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; - if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + - `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); - } - const blockList = []; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let i = 0; i < numBlocks; i++) { - batch.addOperation(async () => { - const blockID = generateBlockID(blockIDPrefix, i); - const start = options.blockSize * i; - const end = i === numBlocks - 1 ? size : start + options.blockSize; - const contentLength = end - start; - blockList.push(blockID); - await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { - abortSignal: options.abortSignal, - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions, - }); - // Update progress after block is successfully uploaded to server, in case of block trying - // TODO: Hook with convenience layer progress event in finer level - transferProgress += contentLength; - if (options.onProgress) { - options.onProgress({ - loadedBytes: transferProgress, - }); - } - }); - } - await batch.do(); - return this.commitBlockList(blockList, updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Uploads a local file in blocks to a block blob. - * - * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. - * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList - * to commit the block list. - * - * @param filePath - Full path of local file - * @param options - Options to Upload to Block Blob operation. - * @returns Response data for the Blob Upload operation. 
- */ - async uploadFile(filePath, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); - try { - const size = (await fsStat(filePath)).size; - return await this.uploadSeekableInternal((offset, count) => { - return () => fsCreateReadStream(filePath, { - autoClose: true, - end: count ? offset + count - 1 : Infinity, - start: offset, - }); - }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Uploads a Node.js Readable stream into block blob. - * - * PERFORMANCE IMPROVEMENT TIPS: - * * Input stream highWaterMark is better to set a same value with bufferSize - * parameter, which will avoid Buffer.concat() operations. - * - * @param stream - Node.js Readable stream - * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB - * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, - * positive correlation with max uploading concurrency. Default value is 5 - * @param options - Options to Upload Stream to Block Blob operation. - * @returns Response data for the Blob Upload operation. - */ - async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); - try { - let blockNum = 0; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const blockList = []; - const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { - const blockID = generateBlockID(blockIDPrefix, blockNum); - blockList.push(blockID); - blockNum++; - await this.stageBlock(blockID, body, length, { - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions, - }); - // Update progress after block is successfully uploaded to server, in case of block trying - transferProgress += length; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }, - // concurrency should set a smaller value than maxConcurrency, which is helpful to - // reduce the possibility when a outgoing handler waits for stream data, in - // this situation, outgoing handlers are blocked. - // Outgoing queue shouldn't be empty. - Math.ceil((maxConcurrency / 4) * 3)); - await scheduler.do(); - return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } -} -/** - * PageBlobClient defines a set of operations applicable to page blobs. - */ -class PageBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, - // Legacy, no fix for eslint error without breaking. 
Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. - // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); - let pipeline; - let url; - options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - // (url: string, pipeline: Pipeline) - url = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || - credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - url = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } - else if (!credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName !== "string") { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - // The second parameter is undefined. Use anonymous credential. - url = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); - } - else if (credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName === "string" && - blobNameOrOptions && - typeof blobNameOrOptions === "string") { - // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } - else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } - else if (extractedCreds.kind === "SASConnString") { - url = - appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + - "?" + - extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } - else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } - else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); - } - super(url, pipeline); - this.pageBlobContext = new PageBlob(this.storageClientContext); - } - /** - * Creates a new PageBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. - * - * @param snapshot - The snapshot timestamp. - * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. 
- */ - withSnapshot(snapshot) { - return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); - } - /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob - * - * @param size - size of the page blob. - * @param options - Options to the Page Blob Create operation. - * @returns Response data for the Page Blob Create operation. - */ - async create(size, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-create", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. If the blob with the same name already exists, the content - * of the existing blob will remain unchanged. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob - * - * @param size - size of the page blob. - * @param options - - */ - async createIfNotExists(size, options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); - try { - const conditions = { ifNoneMatch: ETagAny }; - const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. 
- * @see https://docs.microsoft.com/rest/api/storageservices/put-page - * - * @param body - Data to upload - * @param offset - Offset of destination page blob - * @param count - Content length of the body, also number of bytes to be uploaded - * @param options - Options to the Page Blob Upload Pages operation. - * @returns Response data for the Page Blob Upload Pages operation. - */ - async uploadPages(body, offset, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress, - }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * The Upload Pages operation writes a range of pages to a page blob where the - * contents are read from a URL. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url - * - * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication - * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob - * @param destOffset - Offset of destination page blob - * @param count - Number of bytes to be uploaded from source page blob - * @param options - - */ - async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Frees the specified pages from the page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/put-page - * - * @param offset - Starting byte position of the pages to clear. - * @param count - Number of bytes to clear. - * @param options - Options to the Page Blob Clear Pages operation. - * @returns Response data for the Page Blob Clear Pages operation. - */ - async clearPages(offset = 0, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); - try { - return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Returns the list of valid page ranges for a page blob or snapshot of a page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns Response data for the Page Blob Get Ranges operation. - */ - async getPageRanges(offset = 0, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); - try { - return await this.pageBlobContext - .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * getPageRangesSegment returns a single segment of page ranges starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call getPageRangesSegment again - * (passing the the previously-returned Marker) to get the next segment. 
- * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to PageBlob Get Page Ranges Segment operation. - */ - async listPageRangesSegment(offset = 0, count, marker, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); - try { - return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} - * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param marker - A string value that identifies the portion of - * the get of page ranges to be returned with the next getting operation. The - * operation returns the ContinuationToken value within the response body if the - * getting operation did not return all page ranges remaining within the current page. - * The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of get - * items. The marker value is opaque to the client. - * @param options - Options to List Page Ranges operation. - */ - listPageRangeItemSegments(offset = 0, count, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() { - let getPageRangeItemSegmentsResponse; - if (!!marker || marker === undefined) { - do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options)); - marker = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); - } while (marker); - } - }); - } - /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects - * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to List Page Ranges operation. - */ - listPageRangeItems(offset = 0, count, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1() { - var e_1, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const getPageRangesSegment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } - finally { if (e_1) throw e_1.error; } - } - }); - } - /** - * Returns an async iterable iterator to list of page ranges for a page blob. 
- * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges - * - * .byPage() returns an async iterable iterator to list of page ranges for a page blob. - * - * Example using `for await` syntax: - * - * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` - * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRanges()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * ``` - * - * Example using `iter.next()`: + * Example using `iter.next()`: * * ```js * let i = 1; @@ -98156,24 +90084,19 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Get Page Range Diff operation. */ async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { + var _a; + const result = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshot, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(result); + }); } /** * getPageRangesDiffSegment returns a single segment of page ranges starting from the @@ -98189,25 +90112,23 @@ class PageBlobClient extends BlobClient { * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); - try { - return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { + return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, + leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshotOrUrl, + range: rangeToString({ offset: offset, count: count, - }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }), + marker: marker, + maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} @@ -98247,18 +90168,20 @@ class PageBlobClient extends BlobClient { */ listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { - var e_2, _a; + var _a, e_2, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const getPageRangesSegment = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } @@ -98372,24 +90295,19 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Get Page Range Diff operation. */ async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevSnapshotUrl, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); } /** * Resizes the page blob to the specified size (which must be a multiple of 512). @@ -98400,22 +90318,17 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Resize operation. */ async resize(size, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); - try { - return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.resize(size, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets a page blob's sequence number. @@ -98427,22 +90340,17 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Update Sequence Number operation. */ async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); - try { - return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { + abortSignal: options.abortSignal, + blobSequenceNumber: sequenceNumber, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. @@ -98458,25 +90366,19 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Copy Incremental operation. */ async startCopyIncremental(copySource, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); - try { - return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.copyIncremental(copySource, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. async function getBodyAsText(batchResponse) { let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); @@ -98489,6 +90391,7 @@ function utf8ByteLength(str) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. const HTTP_HEADER_DELIMITER = ": "; const SPACE_DELIMITER = " "; const NOT_FOUND = -1; @@ -98538,7 +90441,7 @@ class BatchResponseParser { for (let index = 0; index < subResponseCount; index++) { const subResponse = subResponses[index]; const deserializedSubResponse = {}; - deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders()); const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); let subRespHeaderStartFound = false; let subRespHeaderEndFound = false; @@ -98620,7 +90523,7 @@ class BatchResponseParser { } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +// Licensed under the MIT License. var MutexLockStatus; (function (MutexLockStatus) { MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; @@ -98685,6 +90588,7 @@ Mutex.keys = {}; Mutex.listeners = {}; // Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT License. /** * A BlobBatch represents an aggregated set of operations on blobs. * Currently, only `delete` and `setAccessTier` are supported. @@ -98737,9 +90641,9 @@ class BlobBatch { let url; let credential; if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + ((coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || credentialOrOptions instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrOptions))) { + coreAuth.isTokenCredential(credentialOrOptions))) { // First overload url = urlOrBlobClient; credential = credentialOrOptions; @@ -98756,8 +90660,7 @@ class BlobBatch { if (!options) { options = {}; } - const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); - try { + return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("delete"); await this.addSubRequestInternal({ url: url, @@ -98765,26 +90668,16 @@ class BlobBatch { }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { let url; let credential; let tier; if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + ((coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || credentialOrTier instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrTier))) { + coreAuth.isTokenCredential(credentialOrTier))) { // First overload url = urlOrBlobClient; credential = credentialOrTier; @@ -98803,8 +90696,7 @@ class BlobBatch { if (!options) { options = {}; } - const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); - try { + return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ url: url, @@ -98812,17 +90704,7 @@ class BlobBatch { }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } } /** @@ -98833,7 +90715,7 @@ class InnerBatchRequest { constructor() { this.operationCount = 0; this.body = ""; - const tempGuid = coreHttp.generateUuid(); + const tempGuid = coreUtil.randomUUID(); // batch_{batchid} this.boundary = `batch_${tempGuid}`; // --batch_{batchid} @@ -98854,29 +90736,48 @@ class InnerBatchRequest { * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. */ createPipeline(credential) { - const isAnonymousCreds = credential instanceof AnonymousCredential; - const policyFactoryLength = 3 + (isAnonymousCreds ? 
0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] - const factories = new Array(policyFactoryLength); - factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer - factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers - if (!isAnonymousCreds) { - factories[2] = coreHttp.isTokenCredential(credential) - ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) - : credential; - } - factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire - return new Pipeline(factories, {}); + const corePipeline = coreRestPipeline.createEmptyPipeline(); + corePipeline.addPolicy(coreClient.serializationPolicy({ + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + xmlCharKey: "#", + }, + }, + }), { phase: "Serialize" }); + // Use batch header filter policy to exclude unnecessary headers + corePipeline.addPolicy(batchHeaderFilterPolicy()); + // Use batch assemble policy to assemble request and intercept request from going to wire + corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + const pipeline = new Pipeline([]); + // attach the v2 pipeline to this one + pipeline._credential = credential; + pipeline._corePipeline = corePipeline; + return pipeline; } appendSubRequestToBody(request) { // Start to assemble sub request this.body += [ - this.subRequestPrefix, - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, - "", + this.subRequestPrefix, // sub request constant prefix + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID + "", // empty line after sub request's content ID `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method ].join(HTTP_LINE_ENDING); - for (const header of request.headers.headersArray()) { - this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + for (const [name, value] of request.headers) { + this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; } this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line // No body to assemble for current batch request support @@ -98907,55 +90808,39 @@ class InnerBatchRequest { return this.subRequests; } } -class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { - constructor(batchRequest, nextPolicy, options) { - super(nextPolicy, options); - this.dummyResponse = { - request: new coreHttp.WebResource(), - status: 200, - headers: new coreHttp.HttpHeaders(), - }; - this.batchRequest = batchRequest; - } - async sendRequest(request) { - await this.batchRequest.appendSubRequestToBody(request); - return this.dummyResponse; // Intercept request from going to wire - 
} -} -class BatchRequestAssemblePolicyFactory { - constructor(batchRequest) { - this.batchRequest = batchRequest; - } - create(nextPolicy, options) { - return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); - } +function batchRequestAssemblePolicy(batchRequest) { + return { + name: "batchRequestAssemblePolicy", + async sendRequest(request) { + batchRequest.appendSubRequestToBody(request); + return { + request, + status: 200, + headers: coreRestPipeline.createHttpHeaders(), + }; + }, + }; } -class BatchHeaderFilterPolicy extends coreHttp.BaseRequestPolicy { - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(request) { - let xMsHeaderName = ""; - for (const header of request.headers.headersArray()) { - if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = header.name; +function batchHeaderFilterPolicy() { + return { + name: "batchHeaderFilterPolicy", + async sendRequest(request, next) { + let xMsHeaderName = ""; + for (const [name] of request.headers) { + if (iEqual(name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = name; + } } - } - if (xMsHeaderName !== "") { - request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. - } - return this._nextPolicy.sendRequest(request); - } -} -class BatchHeaderFilterPolicyFactory { - create(nextPolicy, options) { - return new BatchHeaderFilterPolicy(nextPolicy, options); - } + if (xMsHeaderName !== "") { + request.headers.delete(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return next(request); + }, + }; } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. * @@ -98977,14 +90862,14 @@ class BlobBatchClient { else { pipeline = newPipeline(credentialOrPipeline, options); } - const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const storageClientContext = new StorageContextClient(url, getCoreClientOptions(pipeline)); const path = getURLPath(url); if (path && path !== "/") { // Container scoped. - this.serviceOrContainerContext = new Container(storageClientContext); + this.serviceOrContainerContext = storageClientContext.container; } else { - this.serviceOrContainerContext = new Service(storageClientContext); + this.serviceOrContainerContext = storageClientContext.service; } } /** @@ -99063,11 +90948,10 @@ class BlobBatchClient { if (!batchRequest || batchRequest.getSubRequests().size === 0) { throw new RangeError("Batch request should contain one or more sub requests."); } - const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); - try { + return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { const batchRequestBody = batchRequest.getHttpRequestBody(); // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. 
- const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); const responseSummary = await batchResponseParser.parseBatchResponse(); @@ -99083,17 +90967,7 @@ class BlobBatchClient { subResponsesFailedCount: responseSummary.subResponsesFailedCount, }; return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } } @@ -99101,6 +90975,12 @@ class BlobBatchClient { * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. */ class ContainerClient extends StorageClient { + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ @@ -99113,9 +90993,9 @@ class ContainerClient extends StorageClient { url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); @@ -99133,11 +91013,11 @@ class ContainerClient extends StorageClient { const containerName = credentialOrPipelineOrContainerName; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } @@ -99161,13 +91041,7 @@ class ContainerClient extends StorageClient { } super(url, pipeline); this._containerName = this.getContainerNameFromUrl(); - this.containerContext = new Container(this.storageClientContext); - } - /** - * The name of the container. 
- */ - get containerName() { - return this._containerName; + this.containerContext = this.storageClientContext.container; } /** * Creates a new container under the specified account. If the container with @@ -99187,22 +91061,9 @@ class ContainerClient extends StorageClient { * ``` */ async create(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-create", options); - try { - // Spread operator in destructuring assignments, - // this will filter out unwanted properties from the response object into result object - return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.create(updatedOptions)); + }); } /** * Creates a new container under the specified account. If the container with @@ -99213,29 +91074,21 @@ class ContainerClient extends StorageClient { * @param options - */ async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); - try { - const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + else { + throw e; + } + } + }); } /** * Returns true if the Azure container resource represented by this client exists; false otherwise. 
@@ -99247,31 +91100,21 @@ class ContainerClient extends StorageClient { * @param options - */ async exists(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-exists", options); - try { - await this.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions, - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence", + return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, }); - return false; + return true; } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (e.statusCode === 404) { + return false; + } + throw e; + } + }); } /** * Creates a {@link BlobClient} @@ -99332,20 +91175,9 @@ class ContainerClient extends StorageClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); - try { - return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); + }); } /** * Marks the specified container for deletion. The container and any blobs @@ -99358,20 +91190,14 @@ class ContainerClient extends StorageClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("ContainerClient-delete", options); - try { - return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.delete({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Marks the specified container for deletion if it exists. The container and any blobs @@ -99381,29 +91207,19 @@ class ContainerClient extends StorageClient { * @param options - Options to Container Delete operation. */ async deleteIfExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "ContainerNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** * Sets one or more user-defined name-value pairs for the specified container. @@ -99424,21 +91240,16 @@ class ContainerClient extends StorageClient { if (options.conditions.ifUnmodifiedSince) { throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); } - const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); - try { - return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } + return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } /** * Gets the permissions for the specified container. The permissions indicate * whether container data may be accessed publicly. @@ -99454,9 +91265,12 @@ class ContainerClient extends StorageClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); - try { - const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.getAccessPolicy({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); const res = { _response: response._response, blobPublicAccess: response.blobPublicAccess, @@ -99488,17 +91302,7 @@ class ContainerClient extends StorageClient { }); } return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Sets the permissions for the specified container. 
The permissions indicate @@ -99519,8 +91323,7 @@ class ContainerClient extends StorageClient { */ async setAccessPolicy(access, containerAcl, options = {}) { options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); - try { + return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { const acl = []; for (const identifier of containerAcl || []) { acl.push({ @@ -99536,18 +91339,15 @@ class ContainerClient extends StorageClient { id: identifier.id, }); } - return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return assertResponse(await this.containerContext.setAccessPolicy({ + abortSignal: options.abortSignal, + access, + containerAcl: acl, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Get a {@link BlobLeaseClient} that manages leases on the container. @@ -99581,25 +91381,14 @@ class ContainerClient extends StorageClient { * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. */ async uploadBlockBlob(blobName, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); - try { + return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { const blockBlobClient = this.getBlockBlobClient(blobName); const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, response, }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Marks the specified blob or snapshot for deletion. The blob is later deleted @@ -99613,24 +91402,13 @@ class ContainerClient extends StorageClient { * @returns Block blob deletion response data. */ async deleteBlob(blobName, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); - try { + return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { let blobClient = this.getBlobClient(blobName); if (options.versionId) { blobClient = blobClient.withVersion(options.versionId); } - return await blobClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return blobClient.delete(updatedOptions); + }); } /** * listBlobFlatSegment returns a single segment of blobs starting from the @@ -99643,25 +91421,14 @@ class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Flat Segment operation. 
*/ async listBlobFlatSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); - try { - const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }) }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * listBlobHierarchySegment returns a single segment of blobs starting from @@ -99675,29 +91442,18 @@ class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Hierarchy Segment operation. 
*/ async listBlobHierarchySegment(delimiter, marker, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); - try { - const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); return blobPrefix; }) }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse @@ -99711,8 +91467,8 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + listSegments(marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { let listBlobsFlatSegmentResponse; if (!!marker || marker === undefined) { do { @@ -99728,20 +91484,22 @@ class ContainerClient extends StorageClient { * * @param options - Options to list blobs operation. 
*/ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_1, _a; + listItems() { + return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_1, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsFlatSegmentResponse = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsFlatSegmentResponse = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } @@ -99889,8 +91647,8 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ - listHierarchySegments(delimiter, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1() { + listHierarchySegments(delimiter_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter, marker, options = {}) { let listBlobsHierarchySegmentResponse; if (!!marker || marker === undefined) { do { @@ -99907,13 +91665,15 @@ class ContainerClient extends StorageClient { * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. */ - listItemsByHierarchy(delimiter, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { - var e_2, _a; + listItemsByHierarchy(delimiter_1) { + return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter, options = {}) { + var _a, e_2, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsHierarchySegmentResponse = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsHierarchySegmentResponse = _c; const segment = listBlobsHierarchySegmentResponse.segment; if (segment.blobPrefixes) { for (const prefix of segment.blobPrefixes) { @@ -99928,7 +91688,7 @@ class ContainerClient extends StorageClient { catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } @@ -100092,9 +91852,14 @@ class ContainerClient extends StorageClient { * @param options - Options to find blobs by tags. 
*/ async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); - try { - const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a; let tagValue = ""; @@ -100104,17 +91869,7 @@ class ContainerClient extends StorageClient { return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. @@ -100132,8 +91887,8 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { let response; if (!!marker || marker === undefined) { do { @@ -100154,20 +91909,22 @@ class ContainerClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_3, _a; + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_3, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_3_1) { e_3 = { error: e_3_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_3) throw e_3.error; } } @@ -100275,6 +92032,24 @@ class ContainerClient extends StorageClient { }, }; } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. 
+ * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + return tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } getContainerNameFromUrl() { let containerName; try { @@ -100283,23 +92058,23 @@ class ContainerClient extends StorageClient { // "https://myaccount.blob.core.windows.net/mycontainer"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` // http://localhost:10001/devstoreaccount1/containername - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { + const parsedUrl = new URL(this.url); + if (parsedUrl.hostname.split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername". // "https://customdomain.com/containername". // .getPath() -> /containername - containerName = parsedUrl.getPath().split("/")[1]; + containerName = parsedUrl.pathname.split("/")[1]; } else if (isIpEndpointStyle(parsedUrl)) { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername // .getPath() -> /devstoreaccount1/containername - containerName = parsedUrl.getPath().split("/")[2]; + containerName = parsedUrl.pathname.split("/")[2]; } else { // "https://customdomain.com/containername". // .getPath() -> /containername - containerName = parsedUrl.getPath().split("/")[1]; + containerName = parsedUrl.pathname.split("/")[1]; } // decode the encoded containerName - to get all the special characters that might be present in it containerName = decodeURIComponent(containerName); @@ -100332,6 +92107,24 @@ class ContainerClient extends StorageClient { resolve(appendToURLQuery(this.url, sas)); }); } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI + * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + generateSasStringToSign(options) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), this.credential).stringToSign; + } /** * Creates a BlobBatchClient object to conduct batch operations. * @@ -100345,7 +92138,7 @@ class ContainerClient extends StorageClient { } // Copyright (c) Microsoft Corporation. 
-// Licensed under the MIT license. +// Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -100366,7 +92159,7 @@ class AccountSASPermissions { */ this.write = false; /** - * Permission to create blobs and files granted. + * Permission to delete blobs and files granted. */ this.delete = false; /** @@ -100572,7 +92365,7 @@ class AccountSASPermissions { } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +// Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -100644,7 +92437,7 @@ class AccountSASResourceTypes { } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +// Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -100724,6 +92517,7 @@ class AccountSASServices { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -100736,6 +92530,10 @@ class AccountSASServices { * @param sharedKeyCredential - */ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) + .sasQueryParameters; +} +function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { const version = accountSASSignatureValues.version ? accountSASSignatureValues.version : SERVICE_VERSION; @@ -100805,7 +92603,10 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC ].join("\n"); } const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); + return { + sasQueryParameters: new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope), + stringToSign: stringToSign, + }; } /** @@ -100813,26 +92614,6 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC * to manipulate blob containers. */ class BlobServiceClient extends StorageClient { - constructor(url, credentialOrPipeline, - // Legacy, no fix for eslint error without breaking. Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - let pipeline; - if (isPipelineLike(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } - else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || - credentialOrPipeline instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipeline)) { - pipeline = newPipeline(credentialOrPipeline, options); - } - else { - // The second parameter is undefined. 
Use anonymous credential - pipeline = newPipeline(new AnonymousCredential(), options); - } - super(url, pipeline); - this.serviceContext = new Service(this.storageClientContext); - } /** * * Creates an instance of BlobServiceClient from connection string. @@ -100852,10 +92633,10 @@ class BlobServiceClient extends StorageClient { options = options || {}; const extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } const pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); @@ -100872,6 +92653,26 @@ class BlobServiceClient extends StorageClient { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = this.storageClientContext.service; + } /** * Creates a {@link ContainerClient} object * @@ -100895,25 +92696,14 @@ class BlobServiceClient extends StorageClient { * @returns Container creation response and the corresponding container client. */ async createContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); const containerCreateResponse = await containerClient.create(updatedOptions); return { containerClient, containerCreateResponse, }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Deletes a Blob container. @@ -100923,21 +92713,10 @@ class BlobServiceClient extends StorageClient { * @returns Container deletion response. 
*/ async deleteContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); - return await containerClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return containerClient.delete(updatedOptions); + }); } /** * Restore a previously deleted Blob container. @@ -100949,25 +92728,17 @@ class BlobServiceClient extends StorageClient { * @returns Container deletion response. */ async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); // Hack to access a protected member. - const containerContext = new Container(containerClient["storageClientContext"]); - const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, - deletedContainerVersion }, updatedOptions)); + const containerContext = containerClient["storageClientContext"].container; + const containerUndeleteResponse = assertResponse(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, + tracingOptions: updatedOptions.tracingOptions, + })); return { containerClient, containerUndeleteResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Rename an existing Blob Container. @@ -100979,25 +92750,14 @@ class BlobServiceClient extends StorageClient { /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. async renameContainer(sourceContainerName, destinationContainerName, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { + var _a; const containerClient = this.getContainerClient(destinationContainerName); // Hack to access a protected member. - const containerContext = new Container(containerClient["storageClientContext"]); - const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + const containerContext = containerClient["storageClientContext"].container; + const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? 
void 0 : _a.leaseId }))); return { containerClient, containerRenameResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Gets the properties of a storage account’s Blob service, including properties @@ -101008,20 +92768,12 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service Get Properties operation. */ async getProperties(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); - try { - return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets properties for a storage account’s Blob service endpoint, including properties @@ -101033,20 +92785,12 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service Set Properties operation. */ async setProperties(properties, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); - try { - return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.setProperties(properties, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Retrieves statistics related to replication for the Blob service. It is only @@ -101058,20 +92802,12 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service Get Statistics operation. */ async getStatistics(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); - try { - return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getStatistics({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The Get Account Information operation returns the sku name and account kind @@ -101084,20 +92820,12 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service Get Account Info operation. 
*/ async getAccountInfo(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); - try { - return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns a list of the containers under the specified account. @@ -101114,20 +92842,9 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service List Container Segment operation. */ async listContainersSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); - try { - return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); + }); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags @@ -101148,9 +92865,14 @@ class BlobServiceClient extends StorageClient { * @param options - Options to find blobs by tags. 
*/ async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); - try { - const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a; let tagValue = ""; @@ -101160,17 +92882,7 @@ class BlobServiceClient extends StorageClient { return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. @@ -101188,8 +92900,8 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { let response; if (!!marker || marker === undefined) { do { @@ -101210,20 +92922,22 @@ class BlobServiceClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_1, _a; + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_1, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } @@ -101345,8 +93059,8 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list containers operation. 
*/ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + listSegments(marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { let listContainersSegmentResponse; if (!!marker || marker === undefined) { do { @@ -101364,20 +93078,22 @@ class BlobServiceClient extends StorageClient { * * @param options - Options to list containers operation. */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_2, _a; + listItems() { + return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_2, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } @@ -101507,12 +93223,14 @@ class BlobServiceClient extends StorageClient { * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time */ async getUserDelegationKey(startsOn, expiresOn, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); - try { - const response = await this.serviceContext.getUserDelegationKey({ + return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.getUserDelegationKey({ startsOn: truncatedISO8061Date(startsOn, false), expiresOn: truncatedISO8061Date(expiresOn, false), - }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + }, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); const userDelegationKey = { signedObjectId: response.signedObjectId, signedTenantId: response.signedTenantId, @@ -101524,17 +93242,7 @@ class BlobServiceClient extends StorageClient { }; const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Creates a BlobBatchClient object to conduct batch operations. @@ -101573,39 +93281,45 @@ class BlobServiceClient extends StorageClient { resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); return appendToURLQuery(this.url, sas); } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasStringToSign(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + return generateAccountSASQueryParametersInternal(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).stringToSign; + } } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +// Licensed under the MIT License. /** Known values of {@link EncryptionAlgorithmType} that the service accepts. */ exports.KnownEncryptionAlgorithmType = void 0; (function (KnownEncryptionAlgorithmType) { KnownEncryptionAlgorithmType["AES256"] = "AES256"; })(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); -Object.defineProperty(exports, "BaseRequestPolicy", ({ - enumerable: true, - get: function () { return coreHttp.BaseRequestPolicy; } -})); -Object.defineProperty(exports, "HttpHeaders", ({ - enumerable: true, - get: function () { return coreHttp.HttpHeaders; } -})); -Object.defineProperty(exports, "RequestPolicyOptions", ({ - enumerable: true, - get: function () { return coreHttp.RequestPolicyOptions; } -})); Object.defineProperty(exports, "RestError", ({ enumerable: true, - get: function () { return coreHttp.RestError; } -})); -Object.defineProperty(exports, "WebResource", ({ - enumerable: true, - get: function () { return coreHttp.WebResource; } -})); -Object.defineProperty(exports, "deserializationPolicy", ({ - enumerable: true, - get: function () { return coreHttp.deserializationPolicy; } + get: function () { return coreRestPipeline.RestError; } })); exports.AccountSASPermissions = AccountSASPermissions; exports.AccountSASResourceTypes = AccountSASResourceTypes; @@ -101613,6 +93327,7 @@ exports.AccountSASServices = AccountSASServices; exports.AnonymousCredential = AnonymousCredential; exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; exports.AppendBlobClient = AppendBlobClient; +exports.BaseRequestPolicy = BaseRequestPolicy; exports.BlobBatch = BlobBatch; exports.BlobBatchClient = BlobBatchClient; exports.BlobClient = BlobClient; @@ -101645,50509 +93360,129582 @@ exports.newPipeline = newPipeline; /***/ }), -/***/ 18486: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 14079: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +/* module decorator */ module = __nccwpck_require__.nmd(module); +(e=>{"function"==typeof define&&define.amd?define(["protobufjs/minimal"],e): true&&module&&module.exports&&(module.exports=e(__nccwpck_require__(96916)))})(function(r){var 
e,t,o,n,C,i=r.util,a=r.roots.firestore_v1||(r.roots.firestore_v1={});function V(e){if(e)for(var t=Object.keys(e),o=0;o>>0),null!=e.totalDocuments&&(t.totalDocuments=e.totalDocuments>>>0),null!=e.totalBytes&&(i.Long?(t.totalBytes=i.Long.fromValue(e.totalBytes)).unsigned=!0:"string"==typeof e.totalBytes?t.totalBytes=parseInt(e.totalBytes,10):"number"==typeof e.totalBytes?t.totalBytes=e.totalBytes:"object"==typeof e.totalBytes&&(t.totalBytes=new i.LongBits(e.totalBytes.low>>>0,e.totalBytes.high>>>0).toNumber(!0))),t},B.toObject=function(e,t){var o,r={};return(t=t||{}).defaults&&(r.id="",r.createTime=null,r.version=0,r.totalDocuments=0,i.Long?(o=new i.Long(0,0,!0),r.totalBytes=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.totalBytes=t.longs===String?"0":0),null!=e.id&&e.hasOwnProperty("id")&&(r.id=e.id),null!=e.createTime&&e.hasOwnProperty("createTime")&&(r.createTime=a.google.protobuf.Timestamp.toObject(e.createTime,t)),null!=e.version&&e.hasOwnProperty("version")&&(r.version=e.version),null!=e.totalDocuments&&e.hasOwnProperty("totalDocuments")&&(r.totalDocuments=e.totalDocuments),null!=e.totalBytes&&e.hasOwnProperty("totalBytes")&&("number"==typeof e.totalBytes?r.totalBytes=t.longs===String?String(e.totalBytes):e.totalBytes:r.totalBytes=t.longs===String?i.Long.prototype.toString.call(e.totalBytes):t.longs===Number?new i.LongBits(e.totalBytes.low>>>0,e.totalBytes.high>>>0).toNumber(!0):e.totalBytes),r},B.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},B.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/firestore.BundleMetadata"},B),o.BundleElement=(J.prototype.metadata=null,J.prototype.namedQuery=null,J.prototype.documentMetadata=null,J.prototype.document=null,Object.defineProperty(J.prototype,"elementType",{get:i.oneOfGetter(t=["metadata","namedQuery","documentMetadata","document"]),set:i.oneOfSetter(t)}),J.fromObject=function(e){if(e instanceof a.firestore.BundleElement)return e;var t=new a.firestore.BundleElement;if(null!=e.metadata){if("object"!=typeof e.metadata)throw TypeError(".firestore.BundleElement.metadata: object expected");t.metadata=a.firestore.BundleMetadata.fromObject(e.metadata)}if(null!=e.namedQuery){if("object"!=typeof e.namedQuery)throw TypeError(".firestore.BundleElement.namedQuery: object expected");t.namedQuery=a.firestore.NamedQuery.fromObject(e.namedQuery)}if(null!=e.documentMetadata){if("object"!=typeof e.documentMetadata)throw TypeError(".firestore.BundleElement.documentMetadata: object expected");t.documentMetadata=a.firestore.BundledDocumentMetadata.fromObject(e.documentMetadata)}if(null!=e.document){if("object"!=typeof e.document)throw TypeError(".firestore.BundleElement.document: object expected");t.document=a.google.firestore.v1.Document.fromObject(e.document)}return t},J.toObject=function(e,t){t=t||{};var o={};return 
null!=e.metadata&&e.hasOwnProperty("metadata")&&(o.metadata=a.firestore.BundleMetadata.toObject(e.metadata,t),t.oneofs)&&(o.elementType="metadata"),null!=e.namedQuery&&e.hasOwnProperty("namedQuery")&&(o.namedQuery=a.firestore.NamedQuery.toObject(e.namedQuery,t),t.oneofs)&&(o.elementType="namedQuery"),null!=e.documentMetadata&&e.hasOwnProperty("documentMetadata")&&(o.documentMetadata=a.firestore.BundledDocumentMetadata.toObject(e.documentMetadata,t),t.oneofs)&&(o.elementType="documentMetadata"),null!=e.document&&e.hasOwnProperty("document")&&(o.document=a.google.firestore.v1.Document.toObject(e.document,t),t.oneofs)&&(o.elementType="document"),o},J.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},J.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/firestore.BundleElement"},J),o),a.google=((C={}).firestore=((t={}).v1=((o={}).AggregationResult=(Q.prototype.aggregateFields=i.emptyObject,Q.fromObject=function(e){if(e instanceof a.google.firestore.v1.AggregationResult)return e;var t=new a.google.firestore.v1.AggregationResult;if(e.aggregateFields){if("object"!=typeof e.aggregateFields)throw TypeError(".google.firestore.v1.AggregationResult.aggregateFields: object expected");t.aggregateFields={};for(var o=Object.keys(e.aggregateFields),r=0;r>>0,e.integerValue.high>>>0).toNumber())),null!=e.doubleValue&&(t.doubleValue=Number(e.doubleValue)),null!=e.timestampValue){if("object"!=typeof e.timestampValue)throw TypeError(".google.firestore.v1.Value.timestampValue: object expected");t.timestampValue=a.google.protobuf.Timestamp.fromObject(e.timestampValue)}if(null!=e.stringValue&&(t.stringValue=String(e.stringValue)),null!=e.bytesValue&&("string"==typeof e.bytesValue?i.base64.decode(e.bytesValue,t.bytesValue=i.newBuffer(i.base64.length(e.bytesValue)),0):0<=e.bytesValue.length&&(t.bytesValue=e.bytesValue)),null!=e.referenceValue&&(t.referenceValue=String(e.referenceValue)),null!=e.geoPointValue){if("object"!=typeof e.geoPointValue)throw TypeError(".google.firestore.v1.Value.geoPointValue: object expected");t.geoPointValue=a.google.type.LatLng.fromObject(e.geoPointValue)}if(null!=e.arrayValue){if("object"!=typeof e.arrayValue)throw TypeError(".google.firestore.v1.Value.arrayValue: object expected");t.arrayValue=a.google.firestore.v1.ArrayValue.fromObject(e.arrayValue)}if(null!=e.mapValue){if("object"!=typeof e.mapValue)throw TypeError(".google.firestore.v1.Value.mapValue: object expected");t.mapValue=a.google.firestore.v1.MapValue.fromObject(e.mapValue)}return t},s.toObject=function(e,t){t=t||{};var o={};return null!=e.booleanValue&&e.hasOwnProperty("booleanValue")&&(o.booleanValue=e.booleanValue,t.oneofs)&&(o.valueType="booleanValue"),null!=e.integerValue&&e.hasOwnProperty("integerValue")&&("number"==typeof e.integerValue?o.integerValue=t.longs===String?String(e.integerValue):e.integerValue:o.integerValue=t.longs===String?i.Long.prototype.toString.call(e.integerValue):t.longs===Number?new 
i.LongBits(e.integerValue.low>>>0,e.integerValue.high>>>0).toNumber():e.integerValue,t.oneofs)&&(o.valueType="integerValue"),null!=e.doubleValue&&e.hasOwnProperty("doubleValue")&&(o.doubleValue=t.json&&!isFinite(e.doubleValue)?String(e.doubleValue):e.doubleValue,t.oneofs)&&(o.valueType="doubleValue"),null!=e.referenceValue&&e.hasOwnProperty("referenceValue")&&(o.referenceValue=e.referenceValue,t.oneofs)&&(o.valueType="referenceValue"),null!=e.mapValue&&e.hasOwnProperty("mapValue")&&(o.mapValue=a.google.firestore.v1.MapValue.toObject(e.mapValue,t),t.oneofs)&&(o.valueType="mapValue"),null!=e.geoPointValue&&e.hasOwnProperty("geoPointValue")&&(o.geoPointValue=a.google.type.LatLng.toObject(e.geoPointValue,t),t.oneofs)&&(o.valueType="geoPointValue"),null!=e.arrayValue&&e.hasOwnProperty("arrayValue")&&(o.arrayValue=a.google.firestore.v1.ArrayValue.toObject(e.arrayValue,t),t.oneofs)&&(o.valueType="arrayValue"),null!=e.timestampValue&&e.hasOwnProperty("timestampValue")&&(o.timestampValue=a.google.protobuf.Timestamp.toObject(e.timestampValue,t),t.oneofs)&&(o.valueType="timestampValue"),null!=e.nullValue&&e.hasOwnProperty("nullValue")&&(o.nullValue=t.enums!==String||void 0===a.google.protobuf.NullValue[e.nullValue]?e.nullValue:a.google.protobuf.NullValue[e.nullValue],t.oneofs)&&(o.valueType="nullValue"),null!=e.stringValue&&e.hasOwnProperty("stringValue")&&(o.stringValue=e.stringValue,t.oneofs)&&(o.valueType="stringValue"),null!=e.bytesValue&&e.hasOwnProperty("bytesValue")&&(o.bytesValue=t.bytes===String?i.base64.encode(e.bytesValue,0,e.bytesValue.length):t.bytes===Array?Array.prototype.slice.call(e.bytesValue):e.bytesValue,t.oneofs)&&(o.valueType="bytesValue"),o},s.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},s.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.firestore.v1.Value"},s),o.ArrayValue=(G.prototype.values=i.emptyArray,G.fromObject=function(e){if(e instanceof a.google.firestore.v1.ArrayValue)return e;var t=new a.google.firestore.v1.ArrayValue;if(e.values){if(!Array.isArray(e.values))throw TypeError(".google.firestore.v1.ArrayValue.values: array expected");t.values=[];for(var o=0;o>>0,e.partitionCount.high>>>0).toNumber())),null!=e.pageToken&&(t.pageToken=String(e.pageToken)),null!=e.pageSize&&(t.pageSize=0|e.pageSize),null!=e.readTime){if("object"!=typeof e.readTime)throw TypeError(".google.firestore.v1.PartitionQueryRequest.readTime: object expected");t.readTime=a.google.protobuf.Timestamp.fromObject(e.readTime)}return t},d.toObject=function(e,t){var o,r={};return(t=t||{}).defaults&&(r.parent="",i.Long?(o=new i.Long(0,0,!1),r.partitionCount=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.partitionCount=t.longs===String?"0":0,r.pageToken="",r.pageSize=0),null!=e.parent&&e.hasOwnProperty("parent")&&(r.parent=e.parent),null!=e.structuredQuery&&e.hasOwnProperty("structuredQuery")&&(r.structuredQuery=a.google.firestore.v1.StructuredQuery.toObject(e.structuredQuery,t),t.oneofs)&&(r.queryType="structuredQuery"),null!=e.partitionCount&&e.hasOwnProperty("partitionCount")&&("number"==typeof e.partitionCount?r.partitionCount=t.longs===String?String(e.partitionCount):e.partitionCount:r.partitionCount=t.longs===String?i.Long.prototype.toString.call(e.partitionCount):t.longs===Number?new 
i.LongBits(e.partitionCount.low>>>0,e.partitionCount.high>>>0).toNumber():e.partitionCount),null!=e.pageToken&&e.hasOwnProperty("pageToken")&&(r.pageToken=e.pageToken),null!=e.pageSize&&e.hasOwnProperty("pageSize")&&(r.pageSize=e.pageSize),null!=e.readTime&&e.hasOwnProperty("readTime")&&(r.readTime=a.google.protobuf.Timestamp.toObject(e.readTime,t),t.oneofs)&&(r.consistencySelector="readTime"),r},d.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},d.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.firestore.v1.PartitionQueryRequest"},d),o.PartitionQueryResponse=(ce.prototype.partitions=i.emptyArray,ce.prototype.nextPageToken="",ce.fromObject=function(e){if(e instanceof a.google.firestore.v1.PartitionQueryResponse)return e;var t=new a.google.firestore.v1.PartitionQueryResponse;if(e.partitions){if(!Array.isArray(e.partitions))throw TypeError(".google.firestore.v1.PartitionQueryResponse.partitions: array expected");t.partitions=[];for(var o=0;o>>0,e.resultsReturned.high>>>0).toNumber())),null!=e.executionDuration){if("object"!=typeof e.executionDuration)throw TypeError(".google.firestore.v1.ExecutionStats.executionDuration: object expected");t.executionDuration=a.google.protobuf.Duration.fromObject(e.executionDuration)}if(null!=e.readOperations&&(i.Long?(t.readOperations=i.Long.fromValue(e.readOperations)).unsigned=!1:"string"==typeof e.readOperations?t.readOperations=parseInt(e.readOperations,10):"number"==typeof e.readOperations?t.readOperations=e.readOperations:"object"==typeof e.readOperations&&(t.readOperations=new i.LongBits(e.readOperations.low>>>0,e.readOperations.high>>>0).toNumber())),null!=e.debugStats){if("object"!=typeof e.debugStats)throw TypeError(".google.firestore.v1.ExecutionStats.debugStats: object expected");t.debugStats=a.google.protobuf.Struct.fromObject(e.debugStats)}return t},Le.toObject=function(e,t){var o,r={};return(t=t||{}).defaults&&(i.Long?(o=new i.Long(0,0,!1),r.resultsReturned=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.resultsReturned=t.longs===String?"0":0,r.executionDuration=null,i.Long?(o=new i.Long(0,0,!1),r.readOperations=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.readOperations=t.longs===String?"0":0,r.debugStats=null),null!=e.resultsReturned&&e.hasOwnProperty("resultsReturned")&&("number"==typeof e.resultsReturned?r.resultsReturned=t.longs===String?String(e.resultsReturned):e.resultsReturned:r.resultsReturned=t.longs===String?i.Long.prototype.toString.call(e.resultsReturned):t.longs===Number?new i.LongBits(e.resultsReturned.low>>>0,e.resultsReturned.high>>>0).toNumber():e.resultsReturned),null!=e.executionDuration&&e.hasOwnProperty("executionDuration")&&(r.executionDuration=a.google.protobuf.Duration.toObject(e.executionDuration,t)),null!=e.readOperations&&e.hasOwnProperty("readOperations")&&("number"==typeof e.readOperations?r.readOperations=t.longs===String?String(e.readOperations):e.readOperations:r.readOperations=t.longs===String?i.Long.prototype.toString.call(e.readOperations):t.longs===Number?new i.LongBits(e.readOperations.low>>>0,e.readOperations.high>>>0).toNumber():e.readOperations),null!=e.debugStats&&e.hasOwnProperty("debugStats")&&(r.debugStats=a.google.protobuf.Struct.toObject(e.debugStats,t)),r},Le.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},Le.getTypeUrl=function(e){return(e=void 
0===e?"type.googleapis.com":e)+"/google.firestore.v1.ExecutionStats"},Le),o.Write=(v.prototype.update=null,v.prototype.delete=null,v.prototype.transform=null,v.prototype.updateMask=null,v.prototype.updateTransforms=i.emptyArray,v.prototype.currentDocument=null,Object.defineProperty(v.prototype,"operation",{get:i.oneOfGetter(n=["update","delete","transform"]),set:i.oneOfSetter(n)}),v.fromObject=function(e){if(e instanceof a.google.firestore.v1.Write)return e;var t=new a.google.firestore.v1.Write;if(null!=e.update){if("object"!=typeof e.update)throw TypeError(".google.firestore.v1.Write.update: object expected");t.update=a.google.firestore.v1.Document.fromObject(e.update)}if(null!=e.delete&&(t.delete=String(e.delete)),null!=e.transform){if("object"!=typeof e.transform)throw TypeError(".google.firestore.v1.Write.transform: object expected");t.transform=a.google.firestore.v1.DocumentTransform.fromObject(e.transform)}if(null!=e.updateMask){if("object"!=typeof e.updateMask)throw TypeError(".google.firestore.v1.Write.updateMask: object expected");t.updateMask=a.google.firestore.v1.DocumentMask.fromObject(e.updateMask)}if(e.updateTransforms){if(!Array.isArray(e.updateTransforms))throw TypeError(".google.firestore.v1.Write.updateTransforms: array expected");t.updateTransforms=[];for(var o=0;o>>0,e.positiveIntValue.high>>>0).toNumber(!0))),null!=e.negativeIntValue&&(i.Long?(t.negativeIntValue=i.Long.fromValue(e.negativeIntValue)).unsigned=!1:"string"==typeof e.negativeIntValue?t.negativeIntValue=parseInt(e.negativeIntValue,10):"number"==typeof e.negativeIntValue?t.negativeIntValue=e.negativeIntValue:"object"==typeof e.negativeIntValue&&(t.negativeIntValue=new i.LongBits(e.negativeIntValue.low>>>0,e.negativeIntValue.high>>>0).toNumber())),null!=e.doubleValue&&(t.doubleValue=Number(e.doubleValue)),null!=e.stringValue&&("string"==typeof e.stringValue?i.base64.decode(e.stringValue,t.stringValue=i.newBuffer(i.base64.length(e.stringValue)),0):0<=e.stringValue.length&&(t.stringValue=e.stringValue)),null!=e.aggregateValue&&(t.aggregateValue=String(e.aggregateValue)),t},x.toObject=function(e,t){var o,r={};if(((t=t||{}).arrays||t.defaults)&&(r.name=[]),t.defaults&&(r.identifierValue="",i.Long?(o=new i.Long(0,0,!0),r.positiveIntValue=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.positiveIntValue=t.longs===String?"0":0,i.Long?(o=new i.Long(0,0,!1),r.negativeIntValue=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.negativeIntValue=t.longs===String?"0":0,r.doubleValue=0,t.bytes===String?r.stringValue="":(r.stringValue=[],t.bytes!==Array&&(r.stringValue=i.newBuffer(r.stringValue))),r.aggregateValue=""),e.name&&e.name.length){r.name=[];for(var n=0;n>>0,e.positiveIntValue.high>>>0).toNumber(!0):e.positiveIntValue),null!=e.negativeIntValue&&e.hasOwnProperty("negativeIntValue")&&("number"==typeof e.negativeIntValue?r.negativeIntValue=t.longs===String?String(e.negativeIntValue):e.negativeIntValue:r.negativeIntValue=t.longs===String?i.Long.prototype.toString.call(e.negativeIntValue):t.longs===Number?new 
i.LongBits(e.negativeIntValue.low>>>0,e.negativeIntValue.high>>>0).toNumber():e.negativeIntValue),null!=e.doubleValue&&e.hasOwnProperty("doubleValue")&&(r.doubleValue=t.json&&!isFinite(e.doubleValue)?String(e.doubleValue):e.doubleValue),null!=e.stringValue&&e.hasOwnProperty("stringValue")&&(r.stringValue=t.bytes===String?i.base64.encode(e.stringValue,0,e.stringValue.length):t.bytes===Array?Array.prototype.slice.call(e.stringValue):e.stringValue),null!=e.aggregateValue&&e.hasOwnProperty("aggregateValue")&&(r.aggregateValue=e.aggregateValue),r},x.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},x.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.UninterpretedOption"},x.NamePart=(Tt.prototype.namePart="",Tt.prototype.isExtension=!1,Tt.fromObject=function(e){var t;return e instanceof a.google.protobuf.UninterpretedOption.NamePart?e:(t=new a.google.protobuf.UninterpretedOption.NamePart,null!=e.namePart&&(t.namePart=String(e.namePart)),null!=e.isExtension&&(t.isExtension=Boolean(e.isExtension)),t)},Tt.toObject=function(e,t){var o={};return(t=t||{}).defaults&&(o.namePart="",o.isExtension=!1),null!=e.namePart&&e.hasOwnProperty("namePart")&&(o.namePart=e.namePart),null!=e.isExtension&&e.hasOwnProperty("isExtension")&&(o.isExtension=e.isExtension),o},Tt.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},Tt.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.UninterpretedOption.NamePart"},Tt),x),t.FeatureSet=(F.prototype.fieldPresence=0,F.prototype.enumType=0,F.prototype.repeatedFieldEncoding=0,F.prototype.utf8Validation=0,F.prototype.messageEncoding=0,F.prototype.jsonFormat=0,F.fromObject=function(e){if(e instanceof a.google.protobuf.FeatureSet)return e;var t=new a.google.protobuf.FeatureSet;switch(e.fieldPresence){default:"number"==typeof e.fieldPresence&&(t.fieldPresence=e.fieldPresence);break;case"FIELD_PRESENCE_UNKNOWN":case 0:t.fieldPresence=0;break;case"EXPLICIT":case 1:t.fieldPresence=1;break;case"IMPLICIT":case 2:t.fieldPresence=2;break;case"LEGACY_REQUIRED":case 3:t.fieldPresence=3}switch(e.enumType){default:"number"==typeof e.enumType&&(t.enumType=e.enumType);break;case"ENUM_TYPE_UNKNOWN":case 0:t.enumType=0;break;case"OPEN":case 1:t.enumType=1;break;case"CLOSED":case 2:t.enumType=2}switch(e.repeatedFieldEncoding){default:"number"==typeof e.repeatedFieldEncoding&&(t.repeatedFieldEncoding=e.repeatedFieldEncoding);break;case"REPEATED_FIELD_ENCODING_UNKNOWN":case 0:t.repeatedFieldEncoding=0;break;case"PACKED":case 1:t.repeatedFieldEncoding=1;break;case"EXPANDED":case 2:t.repeatedFieldEncoding=2}switch(e.utf8Validation){default:"number"==typeof e.utf8Validation&&(t.utf8Validation=e.utf8Validation);break;case"UTF8_VALIDATION_UNKNOWN":case 0:t.utf8Validation=0;break;case"VERIFY":case 2:t.utf8Validation=2;break;case"NONE":case 3:t.utf8Validation=3}switch(e.messageEncoding){default:"number"==typeof e.messageEncoding&&(t.messageEncoding=e.messageEncoding);break;case"MESSAGE_ENCODING_UNKNOWN":case 0:t.messageEncoding=0;break;case"LENGTH_PREFIXED":case 1:t.messageEncoding=1;break;case"DELIMITED":case 2:t.messageEncoding=2}switch(e.jsonFormat){default:"number"==typeof e.jsonFormat&&(t.jsonFormat=e.jsonFormat);break;case"JSON_FORMAT_UNKNOWN":case 0:t.jsonFormat=0;break;case"ALLOW":case 1:t.jsonFormat=1;break;case"LEGACY_BEST_EFFORT":case 2:t.jsonFormat=2}return t},F.toObject=function(e,t){var 
o={};return(t=t||{}).defaults&&(o.fieldPresence=t.enums===String?"FIELD_PRESENCE_UNKNOWN":0,o.enumType=t.enums===String?"ENUM_TYPE_UNKNOWN":0,o.repeatedFieldEncoding=t.enums===String?"REPEATED_FIELD_ENCODING_UNKNOWN":0,o.utf8Validation=t.enums===String?"UTF8_VALIDATION_UNKNOWN":0,o.messageEncoding=t.enums===String?"MESSAGE_ENCODING_UNKNOWN":0,o.jsonFormat=t.enums===String?"JSON_FORMAT_UNKNOWN":0),null!=e.fieldPresence&&e.hasOwnProperty("fieldPresence")&&(o.fieldPresence=t.enums!==String||void 0===a.google.protobuf.FeatureSet.FieldPresence[e.fieldPresence]?e.fieldPresence:a.google.protobuf.FeatureSet.FieldPresence[e.fieldPresence]),null!=e.enumType&&e.hasOwnProperty("enumType")&&(o.enumType=t.enums!==String||void 0===a.google.protobuf.FeatureSet.EnumType[e.enumType]?e.enumType:a.google.protobuf.FeatureSet.EnumType[e.enumType]),null!=e.repeatedFieldEncoding&&e.hasOwnProperty("repeatedFieldEncoding")&&(o.repeatedFieldEncoding=t.enums!==String||void 0===a.google.protobuf.FeatureSet.RepeatedFieldEncoding[e.repeatedFieldEncoding]?e.repeatedFieldEncoding:a.google.protobuf.FeatureSet.RepeatedFieldEncoding[e.repeatedFieldEncoding]),null!=e.utf8Validation&&e.hasOwnProperty("utf8Validation")&&(o.utf8Validation=t.enums!==String||void 0===a.google.protobuf.FeatureSet.Utf8Validation[e.utf8Validation]?e.utf8Validation:a.google.protobuf.FeatureSet.Utf8Validation[e.utf8Validation]),null!=e.messageEncoding&&e.hasOwnProperty("messageEncoding")&&(o.messageEncoding=t.enums!==String||void 0===a.google.protobuf.FeatureSet.MessageEncoding[e.messageEncoding]?e.messageEncoding:a.google.protobuf.FeatureSet.MessageEncoding[e.messageEncoding]),null!=e.jsonFormat&&e.hasOwnProperty("jsonFormat")&&(o.jsonFormat=t.enums!==String||void 0===a.google.protobuf.FeatureSet.JsonFormat[e.jsonFormat]?e.jsonFormat:a.google.protobuf.FeatureSet.JsonFormat[e.jsonFormat]),o},F.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},F.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.FeatureSet"},F.FieldPresence=(o={},(n=Object.create(o))[o[0]="FIELD_PRESENCE_UNKNOWN"]="FIELD_PRESENCE_UNKNOWN",n[o[1]="EXPLICIT"]="EXPLICIT",n[o[2]="IMPLICIT"]="IMPLICIT",n[o[3]="LEGACY_REQUIRED"]="LEGACY_REQUIRED",n),F.EnumType=(o={},(n=Object.create(o))[o[0]="ENUM_TYPE_UNKNOWN"]="ENUM_TYPE_UNKNOWN",n[o[1]="OPEN"]="OPEN",n[o[2]="CLOSED"]="CLOSED",n),F.RepeatedFieldEncoding=(o={},(n=Object.create(o))[o[0]="REPEATED_FIELD_ENCODING_UNKNOWN"]="REPEATED_FIELD_ENCODING_UNKNOWN",n[o[1]="PACKED"]="PACKED",n[o[2]="EXPANDED"]="EXPANDED",n),F.Utf8Validation=(o={},(n=Object.create(o))[o[0]="UTF8_VALIDATION_UNKNOWN"]="UTF8_VALIDATION_UNKNOWN",n[o[2]="VERIFY"]="VERIFY",n[o[3]="NONE"]="NONE",n),F.MessageEncoding=(o={},(n=Object.create(o))[o[0]="MESSAGE_ENCODING_UNKNOWN"]="MESSAGE_ENCODING_UNKNOWN",n[o[1]="LENGTH_PREFIXED"]="LENGTH_PREFIXED",n[o[2]="DELIMITED"]="DELIMITED",n),F.JsonFormat=(o={},(n=Object.create(o))[o[0]="JSON_FORMAT_UNKNOWN"]="JSON_FORMAT_UNKNOWN",n[o[1]="ALLOW"]="ALLOW",n[o[2]="LEGACY_BEST_EFFORT"]="LEGACY_BEST_EFFORT",n),F),t.FeatureSetDefaults=(jt.prototype.defaults=i.emptyArray,jt.prototype.minimumEdition=0,jt.prototype.maximumEdition=0,jt.fromObject=function(e){if(e instanceof a.google.protobuf.FeatureSetDefaults)return e;var t=new a.google.protobuf.FeatureSetDefaults;if(e.defaults){if(!Array.isArray(e.defaults))throw TypeError(".google.protobuf.FeatureSetDefaults.defaults: array expected");t.defaults=[];for(var 
o=0;o>>0,e.seconds.high>>>0).toNumber())),null!=e.nanos&&(t.nanos=0|e.nanos),t)},It.toObject=function(e,t){var o,r={};return(t=t||{}).defaults&&(i.Long?(o=new i.Long(0,0,!1),r.seconds=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.seconds=t.longs===String?"0":0,r.nanos=0),null!=e.seconds&&e.hasOwnProperty("seconds")&&("number"==typeof e.seconds?r.seconds=t.longs===String?String(e.seconds):e.seconds:r.seconds=t.longs===String?i.Long.prototype.toString.call(e.seconds):t.longs===Number?new i.LongBits(e.seconds.low>>>0,e.seconds.high>>>0).toNumber():e.seconds),null!=e.nanos&&e.hasOwnProperty("nanos")&&(r.nanos=e.nanos),r},It.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},It.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.Timestamp"},It),t.Duration=(At.prototype.seconds=i.Long?i.Long.fromBits(0,0,!1):0,At.prototype.nanos=0,At.fromObject=function(e){var t;return e instanceof a.google.protobuf.Duration?e:(t=new a.google.protobuf.Duration,null!=e.seconds&&(i.Long?(t.seconds=i.Long.fromValue(e.seconds)).unsigned=!1:"string"==typeof e.seconds?t.seconds=parseInt(e.seconds,10):"number"==typeof e.seconds?t.seconds=e.seconds:"object"==typeof e.seconds&&(t.seconds=new i.LongBits(e.seconds.low>>>0,e.seconds.high>>>0).toNumber())),null!=e.nanos&&(t.nanos=0|e.nanos),t)},At.toObject=function(e,t){var o,r={};return(t=t||{}).defaults&&(i.Long?(o=new i.Long(0,0,!1),r.seconds=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.seconds=t.longs===String?"0":0,r.nanos=0),null!=e.seconds&&e.hasOwnProperty("seconds")&&("number"==typeof e.seconds?r.seconds=t.longs===String?String(e.seconds):e.seconds:r.seconds=t.longs===String?i.Long.prototype.toString.call(e.seconds):t.longs===Number?new i.LongBits(e.seconds.low>>>0,e.seconds.high>>>0).toNumber():e.seconds),null!=e.nanos&&e.hasOwnProperty("nanos")&&(r.nanos=e.nanos),r},At.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},At.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.Duration"},At),t.DoubleValue=(kt.prototype.value=0,kt.fromObject=function(e){var t;return e instanceof a.google.protobuf.DoubleValue?e:(t=new a.google.protobuf.DoubleValue,null!=e.value&&(t.value=Number(e.value)),t)},kt.toObject=function(e,t){var o={};return(t=t||{}).defaults&&(o.value=0),null!=e.value&&e.hasOwnProperty("value")&&(o.value=t.json&&!isFinite(e.value)?String(e.value):e.value),o},kt.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},kt.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.DoubleValue"},kt),t.FloatValue=(xt.prototype.value=0,xt.fromObject=function(e){var t;return e instanceof a.google.protobuf.FloatValue?e:(t=new a.google.protobuf.FloatValue,null!=e.value&&(t.value=Number(e.value)),t)},xt.toObject=function(e,t){var o={};return(t=t||{}).defaults&&(o.value=0),null!=e.value&&e.hasOwnProperty("value")&&(o.value=t.json&&!isFinite(e.value)?String(e.value):e.value),o},xt.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},xt.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.FloatValue"},xt),t.Int64Value=(Ft.prototype.value=i.Long?i.Long.fromBits(0,0,!1):0,Ft.fromObject=function(e){var t;return e instanceof a.google.protobuf.Int64Value?e:(t=new 
a.google.protobuf.Int64Value,null!=e.value&&(i.Long?(t.value=i.Long.fromValue(e.value)).unsigned=!1:"string"==typeof e.value?t.value=parseInt(e.value,10):"number"==typeof e.value?t.value=e.value:"object"==typeof e.value&&(t.value=new i.LongBits(e.value.low>>>0,e.value.high>>>0).toNumber())),t)},Ft.toObject=function(e,t){var o,r={};return(t=t||{}).defaults&&(i.Long?(o=new i.Long(0,0,!1),r.value=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.value=t.longs===String?"0":0),null!=e.value&&e.hasOwnProperty("value")&&("number"==typeof e.value?r.value=t.longs===String?String(e.value):e.value:r.value=t.longs===String?i.Long.prototype.toString.call(e.value):t.longs===Number?new i.LongBits(e.value.low>>>0,e.value.high>>>0).toNumber():e.value),r},Ft.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},Ft.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.Int64Value"},Ft),t.UInt64Value=(_t.prototype.value=i.Long?i.Long.fromBits(0,0,!0):0,_t.fromObject=function(e){var t;return e instanceof a.google.protobuf.UInt64Value?e:(t=new a.google.protobuf.UInt64Value,null!=e.value&&(i.Long?(t.value=i.Long.fromValue(e.value)).unsigned=!0:"string"==typeof e.value?t.value=parseInt(e.value,10):"number"==typeof e.value?t.value=e.value:"object"==typeof e.value&&(t.value=new i.LongBits(e.value.low>>>0,e.value.high>>>0).toNumber(!0))),t)},_t.toObject=function(e,t){var o,r={};return(t=t||{}).defaults&&(i.Long?(o=new i.Long(0,0,!0),r.value=t.longs===String?o.toString():t.longs===Number?o.toNumber():o):r.value=t.longs===String?"0":0),null!=e.value&&e.hasOwnProperty("value")&&("number"==typeof e.value?r.value=t.longs===String?String(e.value):e.value:r.value=t.longs===String?i.Long.prototype.toString.call(e.value):t.longs===Number?new i.LongBits(e.value.low>>>0,e.value.high>>>0).toNumber(!0):e.value),r},_t.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},_t.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.UInt64Value"},_t),t.Int32Value=(Ct.prototype.value=0,Ct.fromObject=function(e){var t;return e instanceof a.google.protobuf.Int32Value?e:(t=new a.google.protobuf.Int32Value,null!=e.value&&(t.value=0|e.value),t)},Ct.toObject=function(e,t){var o={};return(t=t||{}).defaults&&(o.value=0),null!=e.value&&e.hasOwnProperty("value")&&(o.value=e.value),o},Ct.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},Ct.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.Int32Value"},Ct),t.UInt32Value=(Vt.prototype.value=0,Vt.fromObject=function(e){var t;return e instanceof a.google.protobuf.UInt32Value?e:(t=new a.google.protobuf.UInt32Value,null!=e.value&&(t.value=e.value>>>0),t)},Vt.toObject=function(e,t){var o={};return(t=t||{}).defaults&&(o.value=0),null!=e.value&&e.hasOwnProperty("value")&&(o.value=e.value),o},Vt.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},Vt.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.UInt32Value"},Vt),t.BoolValue=(Lt.prototype.value=!1,Lt.fromObject=function(e){var t;return e instanceof a.google.protobuf.BoolValue?e:(t=new a.google.protobuf.BoolValue,null!=e.value&&(t.value=Boolean(e.value)),t)},Lt.toObject=function(e,t){var o={};return(t=t||{}).defaults&&(o.value=!1),null!=e.value&&e.hasOwnProperty("value")&&(o.value=e.value),o},Lt.prototype.toJSON=function(){return 
this.constructor.toObject(this,r.util.toJSONOptions)},Lt.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.BoolValue"},Lt),t.StringValue=(Ut.prototype.value="",Ut.fromObject=function(e){var t;return e instanceof a.google.protobuf.StringValue?e:(t=new a.google.protobuf.StringValue,null!=e.value&&(t.value=String(e.value)),t)},Ut.toObject=function(e,t){var o={};return(t=t||{}).defaults&&(o.value=""),null!=e.value&&e.hasOwnProperty("value")&&(o.value=e.value),o},Ut.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},Ut.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.StringValue"},Ut),t.BytesValue=(Bt.prototype.value=i.newBuffer([]),Bt.fromObject=function(e){var t;return e instanceof a.google.protobuf.BytesValue?e:(t=new a.google.protobuf.BytesValue,null!=e.value&&("string"==typeof e.value?i.base64.decode(e.value,t.value=i.newBuffer(i.base64.length(e.value)),0):0<=e.value.length&&(t.value=e.value)),t)},Bt.toObject=function(e,t){var o={};return(t=t||{}).defaults&&(t.bytes===String?o.value="":(o.value=[],t.bytes!==Array&&(o.value=i.newBuffer(o.value)))),null!=e.value&&e.hasOwnProperty("value")&&(o.value=t.bytes===String?i.base64.encode(e.value,0,e.value.length):t.bytes===Array?Array.prototype.slice.call(e.value):e.value),o},Bt.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},Bt.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.BytesValue"},Bt),t.Empty=(Jt.fromObject=function(e){return e instanceof a.google.protobuf.Empty?e:new a.google.protobuf.Empty},Jt.toObject=function(){return{}},Jt.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},Jt.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.Empty"},Jt),t.Any=(Qt.prototype.type_url="",Qt.prototype.value=i.newBuffer([]),Qt.fromObject=function(e){var t;return e instanceof a.google.protobuf.Any?e:(t=new a.google.protobuf.Any,null!=e.type_url&&(t.type_url=String(e.type_url)),null!=e.value&&("string"==typeof e.value?i.base64.decode(e.value,t.value=i.newBuffer(i.base64.length(e.value)),0):0<=e.value.length&&(t.value=e.value)),t)},Qt.toObject=function(e,t){var o={};return(t=t||{}).defaults&&(o.type_url="",t.bytes===String?o.value="":(o.value=[],t.bytes!==Array&&(o.value=i.newBuffer(o.value)))),null!=e.type_url&&e.hasOwnProperty("type_url")&&(o.type_url=e.type_url),null!=e.value&&e.hasOwnProperty("value")&&(o.value=t.bytes===String?i.base64.encode(e.value,0,e.value.length):t.bytes===Array?Array.prototype.slice.call(e.value):e.value),o},Qt.prototype.toJSON=function(){return this.constructor.toObject(this,r.util.toJSONOptions)},Qt.getTypeUrl=function(e){return(e=void 0===e?"type.googleapis.com":e)+"/google.protobuf.Any"},Qt),t.FieldMask=(Mt.prototype.paths=i.emptyArray,Mt.fromObject=function(e){if(e instanceof a.google.protobuf.FieldMask)return e;var t=new a.google.protobuf.FieldMask;if(e.paths){if(!Array.isArray(e.paths))throw TypeError(".google.protobuf.FieldMask.paths: array expected");t.paths=[];for(var o=0;o { -var api = __nccwpck_require__(65163); +"use strict"; -// Copyright (c) Microsoft Corporation. -(function (SpanKind) { - /** Default value. Indicates that the span is used internally. */ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; +/** + * Copyright 2023 Google LLC. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AggregateField = exports.Aggregate = void 0; +const path_1 = __nccwpck_require__(34908); +const assert = __nccwpck_require__(39491); +/** + * Concrete implementation of the Aggregate type. + */ +class Aggregate { + constructor(alias, aggregateType, fieldPath) { + this.alias = alias; + this.aggregateType = aggregateType; + this.fieldPath = fieldPath; + } /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. + * Converts this object to the proto representation of an Aggregate. + * @internal */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + toProto() { + const proto = {}; + if (this.aggregateType === 'count') { + proto.count = {}; + } + else if (this.aggregateType === 'sum') { + assert(this.fieldPath !== undefined, 'Missing field path for sum aggregation.'); + proto.sum = { + field: { + fieldPath: path_1.FieldPath.fromArgument(this.fieldPath).formattedName, + }, + }; + } + else if (this.aggregateType === 'avg') { + assert(this.fieldPath !== undefined, 'Missing field path for average aggregation.'); + proto.avg = { + field: { + fieldPath: path_1.FieldPath.fromArgument(this.fieldPath).formattedName, + }, + }; + } + else { + throw new Error(`Aggregate type ${this.aggregateType} unimplemented.`); + } + proto.alias = this.alias; + return proto; + } +} +exports.Aggregate = Aggregate; +/** + * Represents an aggregation that can be performed by Firestore. + */ +class AggregateField { /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. + * Create a new AggregateField + * @param aggregateType Specifies the type of aggregation operation to perform. + * @param field Optionally specifies the field that is aggregated. + * @internal */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + constructor(aggregateType, field) { + this.aggregateType = aggregateType; + /** A type string to uniquely identify instances of this class. */ + this.type = 'AggregateField'; + this._field = field; + } /** - * Indicates that the span describes producer sending a message to a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. + * Compares this object with the given object for equality. + * + * This object is considered "equal" to the other object if and only if + * `other` performs the same kind of aggregation on the same field (if any). + * + * @param other The object to compare to this object for equality. + * @return `true` if this object is "equal" to the given object, as + * defined above, or `false` otherwise. 
*/ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + isEqual(other) { + return (other instanceof AggregateField && + this.aggregateType === other.aggregateType && + ((this._field === undefined && other._field === undefined) || + (this._field !== undefined && + other._field !== undefined && + path_1.FieldPath.fromArgument(this._field).isEqual(path_1.FieldPath.fromArgument(other._field))))); + } /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. + * Create an AggregateField object that can be used to compute the count of + * documents in the result set of a query. */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(exports.SpanKind || (exports.SpanKind = {})); -/** - * Return the span if one exists + static count() { + return new AggregateField('count'); + } + /** + * Create an AggregateField object that can be used to compute the average of + * a specified field over a range of documents in the result set of a query. + * @param field Specifies the field to average across the result set. + */ + static average(field) { + return new AggregateField('avg', field); + } + /** + * Create an AggregateField object that can be used to compute the sum of + * a specified field over a range of documents in the result set of a query. + * @param field Specifies the field to sum across the result set. + */ + static sum(field) { + return new AggregateField('sum', field); + } +} +exports.AggregateField = AggregateField; +//# sourceMappingURL=aggregate.js.map + +/***/ }), + +/***/ 63544: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2017 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at * - * @param context - context to get span from + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ -function getSpan(context) { - return api.trace.getSpan(context); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExponentialBackoff = exports.delayExecution = exports.MAX_RETRY_ATTEMPTS = exports.DEFAULT_BACKOFF_FACTOR = exports.DEFAULT_BACKOFF_MAX_DELAY_MS = exports.DEFAULT_BACKOFF_INITIAL_DELAY_MS = void 0; +exports.setTimeoutHandler = setTimeoutHandler; +const logger_1 = __nccwpck_require__(42718); +/* + * @module firestore/backoff + * @private + * @internal + * + * Contains backoff logic to facilitate RPC error handling. This class derives + * its implementation from the Firestore Mobile Web Client. + * + * @see https://github.com/firebase/firebase-js-sdk/blob/master/packages/firestore/src/remote/backoff.ts + */ +/*! + * The default initial backoff time in milliseconds after an error. + * Set to 1s according to https://cloud.google.com/apis/design/errors. + */ +exports.DEFAULT_BACKOFF_INITIAL_DELAY_MS = 1000; +/*! + * The default maximum backoff time in milliseconds. + */ +exports.DEFAULT_BACKOFF_MAX_DELAY_MS = 60 * 1000; +/*! 
+ * The default factor to increase the backup by after each failed attempt. + */ +exports.DEFAULT_BACKOFF_FACTOR = 1.5; +/*! + * The default jitter to distribute the backoff attempts by (0 means no + * randomization, 1.0 means +/-50% randomization). + */ +const DEFAULT_JITTER_FACTOR = 1.0; +/*! + * The maximum number of retries that will be attempted by backoff + * before stopping all retry attempts. + */ +exports.MAX_RETRY_ATTEMPTS = 10; +/*! + * The timeout handler used by `ExponentialBackoff` and `BulkWriter`. + */ +exports.delayExecution = setTimeout; +/** + * Allows overriding of the timeout handler used by the exponential backoff + * implementation. If not invoked, we default to `setTimeout()`. + * + * Used only in testing. + * + * @private + * @internal + * @param {function} handler A handler than matches the API of `setTimeout()`. + */ +function setTimeoutHandler(handler) { + exports.delayExecution = (f, ms) => { + handler(f, ms); + const timeout = { + hasRef: () => { + throw new Error('For tests only. Not Implemented'); + }, + ref: () => { + throw new Error('For tests only. Not Implemented'); + }, + refresh: () => { + throw new Error('For tests only. Not Implemented'); + }, + unref: () => { + throw new Error('For tests only. Not Implemented'); + }, + [Symbol.toPrimitive]: () => { + throw new Error('For tests only. Not Implemented'); + }, + }; + // `NodeJS.Timeout` type signature change: + // https://github.com/DefinitelyTyped/DefinitelyTyped/pull/66176/files#diff-e838d0ace9cd5f6516bacfbd3ad00d02cd37bd60f9993ce6223f52d889a1fdbaR122-R126 + // + // Adding `[Symbol.dispose](): void;` cannot be done on older versions of + // NodeJS. So we simply cast to `NodeJS.Timeout`. + return timeout; + }; } /** - * Set the span on a context + * A helper for running delayed tasks following an exponential backoff curve + * between attempts. * - * @param context - context to use as parent - * @param span - span to set active + * Each delay is made up of a "base" delay which follows the exponential + * backoff curve, and a "jitter" (+/- 50% by default) that is calculated and + * added to the base delay. This prevents clients from accidentally + * synchronizing their delays causing spikes of load to the backend. + * + * @private + * @internal */ -function setSpan(context, span) { - return api.trace.setSpan(context, span); +class ExponentialBackoff { + constructor(options = {}) { + /** + * The number of retries that has been attempted. + * + * @private + * @internal + */ + this._retryCount = 0; + /** + * The backoff delay of the current attempt. + * + * @private + * @internal + */ + this.currentBaseMs = 0; + /** + * Whether we are currently waiting for backoff to complete. + * + * @private + * @internal + */ + this.awaitingBackoffCompletion = false; + this.initialDelayMs = + options.initialDelayMs !== undefined + ? options.initialDelayMs + : exports.DEFAULT_BACKOFF_INITIAL_DELAY_MS; + this.backoffFactor = + options.backoffFactor !== undefined + ? options.backoffFactor + : exports.DEFAULT_BACKOFF_FACTOR; + this.maxDelayMs = + options.maxDelayMs !== undefined + ? options.maxDelayMs + : exports.DEFAULT_BACKOFF_MAX_DELAY_MS; + this.jitterFactor = + options.jitterFactor !== undefined + ? options.jitterFactor + : DEFAULT_JITTER_FACTOR; + } + /** + * Resets the backoff delay and retry count. + * + * The very next backoffAndWait() will have no delay. If it is called again + * (i.e. due to an error), initialDelayMs (plus jitter) will be used, and + * subsequent ones will increase according to the backoffFactor. 
+ * + * @private + * @internal + */ + reset() { + this._retryCount = 0; + this.currentBaseMs = 0; + } + /** + * Resets the backoff delay to the maximum delay (e.g. for use after a + * RESOURCE_EXHAUSTED error). + * + * @private + * @internal + */ + resetToMax() { + this.currentBaseMs = this.maxDelayMs; + } + /** + * Returns a promise that resolves after currentDelayMs, and increases the + * delay for any subsequent attempts. + * + * @return A Promise that resolves when the current delay elapsed. + * @private + * @internal + */ + backoffAndWait() { + if (this.awaitingBackoffCompletion) { + return Promise.reject(new Error('A backoff operation is already in progress.')); + } + if (this.retryCount > exports.MAX_RETRY_ATTEMPTS) { + return Promise.reject(new Error('Exceeded maximum number of retries allowed.')); + } + // First schedule using the current base (which may be 0 and should be + // honored as such). + const delayWithJitterMs = this.currentBaseMs + this.jitterDelayMs(); + if (this.currentBaseMs > 0) { + (0, logger_1.logger)('ExponentialBackoff.backoffAndWait', null, `Backing off for ${delayWithJitterMs} ms ` + + `(base delay: ${this.currentBaseMs} ms)`); + } + // Apply backoff factor to determine next delay and ensure it is within + // bounds. + this.currentBaseMs *= this.backoffFactor; + this.currentBaseMs = Math.max(this.currentBaseMs, this.initialDelayMs); + this.currentBaseMs = Math.min(this.currentBaseMs, this.maxDelayMs); + this._retryCount += 1; + return new Promise(resolve => { + this.awaitingBackoffCompletion = true; + (0, exports.delayExecution)(() => { + this.awaitingBackoffCompletion = false; + resolve(); + }, delayWithJitterMs); + }); + } + // Visible for testing. + get retryCount() { + return this._retryCount; + } + /** + * Returns a randomized "jitter" delay based on the current base and jitter + * factor. + * + * @returns {number} The jitter to apply based on the current delay. + * @private + * @internal + */ + jitterDelayMs() { + return (Math.random() - 0.5) * this.jitterFactor * this.currentBaseMs; + } } -/** - * Wrap span context in a NoopSpan and set as span in a new - * context +exports.ExponentialBackoff = ExponentialBackoff; +//# sourceMappingURL=backoff.js.map + +/***/ }), + +/***/ 58427: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BulkWriter = exports.BulkWriterError = exports.DEFAULT_JITTER_FACTOR = exports.DEFAULT_MAXIMUM_OPS_PER_SECOND_LIMIT = exports.DEFAULT_INITIAL_OPS_PER_SECOND_LIMIT = exports.RETRY_MAX_BATCH_SIZE = void 0; +const assert = __nccwpck_require__(39491); +const backoff_1 = __nccwpck_require__(63544); +const rate_limiter_1 = __nccwpck_require__(57934); +const timestamp_1 = __nccwpck_require__(29061); +const util_1 = __nccwpck_require__(15468); +const write_batch_1 = __nccwpck_require__(76012); +const validate_1 = __nccwpck_require__(33822); +const logger_1 = __nccwpck_require__(42718); +const trace_util_1 = __nccwpck_require__(2693); +/*! + * The maximum number of writes that can be in a single batch. + */ +const MAX_BATCH_SIZE = 20; +/*! + * The maximum number of writes can be can in a single batch that is being retried. + */ +exports.RETRY_MAX_BATCH_SIZE = 10; +/*! + * The starting maximum number of operations per second as allowed by the + * 500/50/5 rule. 
* - * @param context - context to set active span on - * @param spanContext - span context to be wrapped + * https://firebase.google.com/docs/firestore/best-practices#ramping_up_traffic. */ -function setSpanContext(context, spanContext) { - return api.trace.setSpanContext(context, spanContext); +exports.DEFAULT_INITIAL_OPS_PER_SECOND_LIMIT = 500; +/*! + * The maximum number of operations per second as allowed by the 500/50/5 rule. + * By default the rate limiter will not exceed this value. + * + * https://firebase.google.com/docs/firestore/best-practices#ramping_up_traffic. + */ +exports.DEFAULT_MAXIMUM_OPS_PER_SECOND_LIMIT = 10000; +/*! + * The default jitter to apply to the exponential backoff used in retries. For + * example, a factor of 0.3 means a 30% jitter is applied. + */ +exports.DEFAULT_JITTER_FACTOR = 0.3; +/*! + * The rate by which to increase the capacity as specified by the 500/50/5 rule. + */ +const RATE_LIMITER_MULTIPLIER = 1.5; +/*! + * How often the operations per second capacity should increase in milliseconds + * as specified by the 500/50/5 rule. + */ +const RATE_LIMITER_MULTIPLIER_MILLIS = 5 * 60 * 1000; +/*! + * The default maximum number of pending operations that can be enqueued onto a + * BulkWriter instance. An operation is considered pending if BulkWriter has + * sent it via RPC and is awaiting the result. BulkWriter buffers additional + * writes after this many pending operations in order to avoiding going OOM. + */ +const DEFAULT_MAXIMUM_PENDING_OPERATIONS_COUNT = 500; +/** + * Represents a single write for BulkWriter, encapsulating operation dispatch + * and error handling. + * @private + * @internal + */ +class BulkWriterOperation { + /** + * @param ref The document reference being written to. + * @param type The type of operation that created this write. + * @param sendFn A callback to invoke when the operation should be sent. + * @param errorFn The user provided global error callback. + * @param successFn The user provided global success callback. + */ + constructor(ref, type, sendFn, errorFn, successFn) { + this.ref = ref; + this.type = type; + this.sendFn = sendFn; + this.errorFn = errorFn; + this.successFn = successFn; + this.deferred = new util_1.Deferred(); + this.failedAttempts = 0; + this._backoffDuration = 0; + /** Whether flush() was called when this was the last enqueued operation. 
*/ + this._flushed = false; + } + get promise() { + return this.deferred.promise; + } + get backoffDuration() { + return this._backoffDuration; + } + markFlushed() { + this._flushed = true; + } + get flushed() { + return this._flushed; + } + onError(error) { + ++this.failedAttempts; + try { + const bulkWriterError = new BulkWriterError(error.code, error.message, this.ref, this.type, this.failedAttempts); + const shouldRetry = this.errorFn(bulkWriterError); + (0, logger_1.logger)('BulkWriter.errorFn', null, 'Ran error callback on error code:', error.code, ', shouldRetry:', shouldRetry, ' for document:', this.ref.path); + if (shouldRetry) { + this.lastStatus = error.code; + this.updateBackoffDuration(); + this.sendFn(this); + } + else { + this.deferred.reject(bulkWriterError); + } + } + catch (userCallbackError) { + this.deferred.reject(userCallbackError); + } + } + updateBackoffDuration() { + if (this.lastStatus === 8 /* StatusCode.RESOURCE_EXHAUSTED */) { + this._backoffDuration = backoff_1.DEFAULT_BACKOFF_MAX_DELAY_MS; + } + else if (this._backoffDuration === 0) { + this._backoffDuration = backoff_1.DEFAULT_BACKOFF_INITIAL_DELAY_MS; + } + else { + this._backoffDuration *= backoff_1.DEFAULT_BACKOFF_FACTOR; + } + } + onSuccess(result) { + try { + this.successFn(this.ref, result); + this.deferred.resolve(result); + } + catch (userCallbackError) { + this.deferred.reject(userCallbackError); + } + } } /** - * Get the span context of the span if it exists. + * Used to represent a batch on the BatchQueue. * - * @param context - context to get values from + * @private + * @internal */ -function getSpanContext(context) { - return api.trace.getSpanContext(context); +class BulkCommitBatch extends write_batch_1.WriteBatch { + constructor(firestore, maxBatchSize) { + super(firestore); + // The set of document reference paths present in the WriteBatch. + this.docPaths = new Set(); + // An array of pending write operations. Only contains writes that have not + // been resolved. + this.pendingOps = []; + this._maxBatchSize = maxBatchSize; + } + get maxBatchSize() { + return this._maxBatchSize; + } + setMaxBatchSize(size) { + assert(this.pendingOps.length <= size, 'New batch size cannot be less than the number of enqueued writes'); + this._maxBatchSize = size; + } + has(documentRef) { + return this.docPaths.has(documentRef.path); + } + async bulkCommit(options = {}) { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_BULK_WRITER_COMMIT, async () => { + var _a; + const tag = (_a = options === null || options === void 0 ? void 0 : options.requestTag) !== null && _a !== void 0 ? _a : (0, util_1.requestTag)(); + // Capture the error stack to preserve stack tracing across async calls. + const stack = Error().stack; + let response; + try { + (0, logger_1.logger)('BulkCommitBatch.bulkCommit', tag, `Sending next batch with ${this._opCount} writes`); + const retryCodes = (0, util_1.getRetryCodes)('batchWrite'); + response = await this._commit({ retryCodes, methodName: 'batchWrite', requestTag: tag }); + } + catch (err) { + // Map the failure to each individual write's result. + const ops = Array.from({ length: this.pendingOps.length }); + response = { + writeResults: ops.map(() => { + return {}; + }), + status: ops.map(() => err), + }; + } + for (let i = 0; i < (response.writeResults || []).length; ++i) { + // Since delete operations currently do not have write times, use a + // sentinel Timestamp value. + // TODO(b/158502664): Use actual delete timestamp. 
+ const DELETE_TIMESTAMP_SENTINEL = timestamp_1.Timestamp.fromMillis(0); + const status = (response.status || [])[i]; + if (status.code === 0 /* StatusCode.OK */) { + const updateTime = timestamp_1.Timestamp.fromProto(response.writeResults[i].updateTime || DELETE_TIMESTAMP_SENTINEL); + this.pendingOps[i].onSuccess(new write_batch_1.WriteResult(updateTime)); + } + else { + const error = new ((__nccwpck_require__(90418).GoogleError))(status.message || undefined); + error.code = status.code; + this.pendingOps[i].onError((0, util_1.wrapError)(error, stack)); + } + } + }, { + [trace_util_1.ATTRIBUTE_KEY_DOC_COUNT]: this._opCount, + }); + } + /** + * Helper to update data structures associated with the operation and returns + * the result. + */ + processLastOperation(op) { + assert(!this.docPaths.has(op.ref.path), 'Batch should not contain writes to the same document'); + this.docPaths.add(op.ref.path); + this.pendingOps.push(op); + } } /** - * Returns true of the given {@link SpanContext} is valid. - * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. + * Used to represent a buffered BulkWriterOperation. * - * @param context - the {@link SpanContext} to validate. - * - * @returns true if the {@link SpanContext} is valid, false otherwise. + * @private + * @internal */ -function isSpanContextValid(context) { - return api.trace.isSpanContextValid(context); +class BufferedOperation { + constructor(operation, sendFn) { + this.operation = operation; + this.sendFn = sendFn; + } } -function getTracer(name, version) { - return api.trace.getTracer(name || "azure/core-tracing", version); +/** + * The error thrown when a BulkWriter operation fails. + * + * @class BulkWriterError + */ +class BulkWriterError extends Error { + /** + * @private + * @internal + */ + constructor( + /** The status code of the error. */ + code, + /** The error message of the error. */ + message, + /** The document reference the operation was performed on. */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + documentRef, + /** The type of operation performed. */ + operationType, + /** How many times this operation has been attempted unsuccessfully. */ + failedAttempts) { + super(message); + this.code = code; + this.message = message; + this.documentRef = documentRef; + this.operationType = operationType; + this.failedAttempts = failedAttempts; + } } -/** Entrypoint for context API */ -const context = api.context; -(function (SpanStatusCode) { +exports.BulkWriterError = BulkWriterError; +/** + * A Firestore BulkWriter that can be used to perform a large number of writes + * in parallel. + * + * @class BulkWriter + */ +class BulkWriter { + // Visible for testing. /** - * The default status. + * @private + * @internal */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + _getBufferedOperationsCount() { + return this._bufferedOperations.length; + } + // Visible for testing. /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. + * @private + * @internal */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + _setMaxBatchSize(size) { + assert(this._bulkCommitBatch.pendingOps.length === 0, 'BulkCommitBatch should be empty'); + this._maxBatchSize = size; + this._bulkCommitBatch = new BulkCommitBatch(this.firestore, size); + } + // Visible for testing. /** - * The operation contains an error. 
+ * @private + * @internal */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(exports.SpanStatusCode || (exports.SpanStatusCode = {})); + _setMaxPendingOpCount(newMax) { + this._maxPendingOpCount = newMax; + } + /** @private */ + constructor(firestore, options) { + var _a, _b; + this.firestore = firestore; + /** + * The maximum number of writes that can be in a single batch. + * Visible for testing. + * @private + * @internal + */ + this._maxBatchSize = MAX_BATCH_SIZE; + /** + * The batch that is currently used to schedule operations. Once this batch + * reaches maximum capacity, a new batch is created. + * @private + * @internal + */ + this._bulkCommitBatch = new BulkCommitBatch(this.firestore, this._maxBatchSize); + /** + * A pointer to the tail of all active BulkWriter operations. This pointer + * is advanced every time a new write is enqueued. + * @private + * @internal + */ + this._lastOp = Promise.resolve(); + /** + * Whether this BulkWriter instance has started to close. Afterwards, no + * new operations can be enqueued, except for retry operations scheduled by + * the error handler. + * @private + * @internal + */ + this._closing = false; + /** + * The number of pending operations enqueued on this BulkWriter instance. + * An operation is considered pending if BulkWriter has sent it via RPC and + * is awaiting the result. + * @private + * @internal + */ + this._pendingOpsCount = 0; + /** + * An array containing buffered BulkWriter operations after the maximum number + * of pending operations has been enqueued. + * @private + * @internal + */ + this._bufferedOperations = []; + /** + * Whether a custom error handler has been set. BulkWriter only swallows + * errors if an error handler is set. Otherwise, an UnhandledPromiseRejection + * is thrown by Node if an operation promise is rejected without being + * handled. + * @private + * @internal + */ + this._errorHandlerSet = false; + /** + * The maximum number of pending operations that can be enqueued onto this + * BulkWriter instance. Once the this number of writes have been enqueued, + * subsequent writes are buffered. + * @private + * @internal + */ + this._maxPendingOpCount = DEFAULT_MAXIMUM_PENDING_OPERATIONS_COUNT; + /** + * The user-provided callback to be run every time a BulkWriter operation + * successfully completes. + * @private + * @internal + */ + this._successFn = () => { }; + /** + * The user-provided callback to be run every time a BulkWriter operation + * fails. + * @private + * @internal + */ + this._errorFn = error => { + const isRetryableDeleteError = error.operationType === 'delete' && + error.code === 13 /* StatusCode.INTERNAL */; + const retryCodes = (0, util_1.getRetryCodes)('batchWrite'); + return ((retryCodes.includes(error.code) || isRetryableDeleteError) && + error.failedAttempts < backoff_1.MAX_RETRY_ATTEMPTS); + }; + this.firestore._incrementBulkWritersCount(); + validateBulkWriterOptions(options); + if ((options === null || options === void 0 ? void 0 : options.throttling) === false) { + this._rateLimiter = new rate_limiter_1.RateLimiter(Number.POSITIVE_INFINITY, Number.POSITIVE_INFINITY, Number.POSITIVE_INFINITY, Number.POSITIVE_INFINITY); + } + else { + let startingRate = exports.DEFAULT_INITIAL_OPS_PER_SECOND_LIMIT; + let maxRate = exports.DEFAULT_MAXIMUM_OPS_PER_SECOND_LIMIT; + if (typeof (options === null || options === void 0 ? void 0 : options.throttling) !== 'boolean') { + if (((_a = options === null || options === void 0 ? void 0 : options.throttling) === null || _a === void 0 ? 
void 0 : _a.maxOpsPerSecond) !== undefined) { + maxRate = options.throttling.maxOpsPerSecond; + } + if (((_b = options === null || options === void 0 ? void 0 : options.throttling) === null || _b === void 0 ? void 0 : _b.initialOpsPerSecond) !== undefined) { + startingRate = options.throttling.initialOpsPerSecond; + } + // The initial validation step ensures that the maxOpsPerSecond is + // greater than initialOpsPerSecond. If this inequality is true, that + // means initialOpsPerSecond was not set and maxOpsPerSecond is less + // than the default starting rate. + if (maxRate < startingRate) { + startingRate = maxRate; + } + // Ensure that the batch size is not larger than the number of allowed + // operations per second. + if (startingRate < this._maxBatchSize) { + this._maxBatchSize = startingRate; + } + } + this._rateLimiter = new rate_limiter_1.RateLimiter(startingRate, RATE_LIMITER_MULTIPLIER, RATE_LIMITER_MULTIPLIER_MILLIS, maxRate); + } + } + /** + * Create a document with the provided data. This single operation will fail + * if a document exists at its location. + * + * @param {DocumentReference} documentRef A reference to the document to be + * created. + * @param {T} data The object to serialize as the document. + * @throws {Error} If the provided input is not a valid Firestore document. + * @returns {Promise} A promise that resolves with the result of + * the write. If the write fails, the promise is rejected with a + * [BulkWriterError]{@link BulkWriterError}. + * + * @example + * ``` + * let bulkWriter = firestore.bulkWriter(); + * let documentRef = firestore.collection('col').doc(); + * + * bulkWriter + * .create(documentRef, {foo: 'bar'}) + * .then(result => { + * console.log('Successfully executed write at: ', result); + * }) + * .catch(err => { + * console.log('Write failed with: ', err); + * }); + * }); + * ``` + */ + create(documentRef, data) { + this._verifyNotClosed(); + return this._enqueue(documentRef, 'create', bulkCommitBatch => bulkCommitBatch.create(documentRef, data)); + } + /** + * Delete a document from the database. + * + * @param {DocumentReference} documentRef A reference to the document to be + * deleted. + * @param {Precondition=} precondition A precondition to enforce for this + * delete. + * @param {Timestamp=} precondition.lastUpdateTime If set, enforces that the + * document was last updated at lastUpdateTime. Fails the batch if the + * document doesn't exist or was last updated at a different time. + * @returns {Promise} A promise that resolves with the result of + * the delete. If the delete fails, the promise is rejected with a + * [BulkWriterError]{@link BulkWriterError}. + * + * @example + * ``` + * let bulkWriter = firestore.bulkWriter(); + * let documentRef = firestore.doc('col/doc'); + * + * bulkWriter + * .delete(documentRef) + * .then(result => { + * console.log('Successfully deleted document'); + * }) + * .catch(err => { + * console.log('Delete failed with: ', err); + * }); + * }); + * ``` + */ + delete(documentRef, precondition) { + this._verifyNotClosed(); + return this._enqueue(documentRef, 'delete', bulkCommitBatch => bulkCommitBatch.delete(documentRef, precondition)); + } + /** + * Write to the document referred to by the provided + * [DocumentReference]{@link DocumentReference}. If the document does not + * exist yet, it will be created. If you pass [SetOptions]{@link SetOptions}., + * the provided data can be merged into the existing document. + * + * @param {DocumentReference} documentRef A reference to the document to be + * set. 
+ * @param {T} data The object to serialize as the document. + * @param {SetOptions=} options An object to configure the set behavior. + * @throws {Error} If the provided input is not a valid Firestore document. + * @param {boolean=} options.merge - If true, set() merges the values + * specified in its data argument. Fields omitted from this set() call remain + * untouched. If your input sets any field to an empty map, all nested fields + * are overwritten. + * @param {Array.=} options.mergeFields - If provided, set() + * only replaces the specified field paths. Any field path that is not + * specified is ignored and remains untouched. If your input sets any field to + * an empty map, all nested fields are overwritten. + * @returns {Promise} A promise that resolves with the result of + * the write. If the write fails, the promise is rejected with a + * [BulkWriterError]{@link BulkWriterError}. + * + * + * @example + * ``` + * let bulkWriter = firestore.bulkWriter(); + * let documentRef = firestore.collection('col').doc(); + * + * bulkWriter + * .set(documentRef, {foo: 'bar'}) + * .then(result => { + * console.log('Successfully executed write at: ', result); + * }) + * .catch(err => { + * console.log('Write failed with: ', err); + * }); + * }); + * ``` + */ + set(documentRef, data, options) { + this._verifyNotClosed(); + return this._enqueue(documentRef, 'set', bulkCommitBatch => { + if (options) { + return bulkCommitBatch.set(documentRef, data, options); + } + else { + return bulkCommitBatch.set(documentRef, data); + } + }); + } + /** + * Update fields of the document referred to by the provided + * [DocumentReference]{@link DocumentReference}. If the document doesn't yet + * exist, the update fails and the entire batch will be rejected. + * + * The update() method accepts either an object with field paths encoded as + * keys and field values encoded as values, or a variable number of arguments + * that alternate between field paths and field values. Nested fields can be + * updated by providing dot-separated field path strings or by providing + * FieldPath objects. + * + * + * A Precondition restricting this update can be specified as the last + * argument. + * + * @param {DocumentReference} documentRef A reference to the document to be + * updated. + * @param {UpdateData|string|FieldPath} dataOrField An object containing the + * fields and values with which to update the document or the path of the + * first field to update. + * @param {...(Precondition|*|string|FieldPath)} preconditionOrValues - An + * alternating list of field paths and values to update or a Precondition to + * restrict this update + * @throws {Error} If the provided input is not valid Firestore data. + * @returns {Promise} A promise that resolves with the result of + * the write. If the write fails, the promise is rejected with a + * [BulkWriterError]{@link BulkWriterError}. 
+ * + * @example + * ``` + * let bulkWriter = firestore.bulkWriter(); + * let documentRef = firestore.doc('col/doc'); + * + * bulkWriter + * .update(documentRef, {foo: 'bar'}) + * .then(result => { + * console.log('Successfully executed write at: ', result); + * }) + * .catch(err => { + * console.log('Write failed with: ', err); + * }); + * }); + * ``` + */ + update(documentRef, dataOrField, ...preconditionOrValues) { + this._verifyNotClosed(); + return this._enqueue(documentRef, 'update', bulkCommitBatch => bulkCommitBatch.update(documentRef, dataOrField, ...preconditionOrValues)); + } + /** + * Callback function set by {@link BulkWriter#onWriteResult} that is run + * every time a {@link BulkWriter} operation successfully completes. + * + * @callback BulkWriter~successCallback + * @param {DocumentReference} documentRef The document reference the + * operation was performed on + * @param {WriteResult} result The server write time of the operation. + */ + /** + * Attaches a listener that is run every time a BulkWriter operation + * successfully completes. + * + * @param {BulkWriter~successCallback} successCallback A callback to be + * called every time a BulkWriter operation successfully completes. + * @example + * ``` + * let bulkWriter = firestore.bulkWriter(); + * + * bulkWriter + * .onWriteResult((documentRef, result) => { + * console.log( + * 'Successfully executed write on document: ', + * documentRef, + * ' at: ', + * result + * ); + * }); + * ``` + */ + onWriteResult(successCallback) { + this._successFn = successCallback; + } + /** + * Callback function set by {@link BulkWriter#onWriteError} that is run when + * a write fails in order to determine whether {@link BulkWriter} should + * retry the operation. + * + * @callback BulkWriter~shouldRetryCallback + * @param {BulkWriterError} error The error object with information about the + * operation and error. + * @returns {boolean} Whether or not to retry the failed operation. Returning + * `true` retries the operation. Returning `false` will stop the retry loop. + */ + /** + * Attaches an error handler listener that is run every time a BulkWriter + * operation fails. + * + * BulkWriter has a default error handler that retries UNAVAILABLE and + * ABORTED errors up to a maximum of 10 failed attempts. When an error + * handler is specified, the default error handler will be overwritten. + * + * @param shouldRetryCallback {BulkWriter~shouldRetryCallback} A callback to + * be called every time a BulkWriter operation fails. Returning `true` will + * retry the operation. Returning `false` will stop the retry loop. + * @example + * ``` + * let bulkWriter = firestore.bulkWriter(); + * + * bulkWriter + * .onWriteError((error) => { + * if ( + * error.code === GrpcStatus.UNAVAILABLE && + * error.failedAttempts < MAX_RETRY_ATTEMPTS + * ) { + * return true; + * } else { + * console.log('Failed write at document: ', error.documentRef); + * return false; + * } + * }); + * ``` + */ + onWriteError(shouldRetryCallback) { + this._errorHandlerSet = true; + this._errorFn = shouldRetryCallback; + } + /** + * Commits all writes that have been enqueued up to this point in parallel. + * + * Returns a Promise that resolves when all currently queued operations have + * been committed. The Promise will never be rejected since the results for + * each individual operation are conveyed via their individual Promises. + * + * The Promise resolves immediately if there are no pending writes. 
Otherwise, + * the Promise waits for all previously issued writes, but it does not wait + * for writes that were added after the method is called. If you want to wait + * for additional writes, call `flush()` again. + * + * @return {Promise} A promise that resolves when all enqueued writes + * up to this point have been committed. + * + * @example + * ``` + * let bulkWriter = firestore.bulkWriter(); + * + * bulkWriter.create(documentRef, {foo: 'bar'}); + * bulkWriter.update(documentRef2, {foo: 'bar'}); + * bulkWriter.delete(documentRef3); + * await flush().then(() => { + * console.log('Executed all writes'); + * }); + * ``` + */ + flush() { + this._verifyNotClosed(); + this._scheduleCurrentBatch(/* flush= */ true); + // Mark the most recent operation as flushed to ensure that the batch + // containing it will be sent once it's popped from the buffer. + if (this._bufferedOperations.length > 0) { + this._bufferedOperations[this._bufferedOperations.length - 1].operation.markFlushed(); + } + return this._lastOp; + } + /** + * Commits all enqueued writes and marks the BulkWriter instance as closed. + * + * After calling `close()`, calling any method will throw an error. Any + * retries scheduled as part of an `onWriteError()` handler will be run + * before the `close()` promise resolves. + * + * Returns a Promise that resolves when there are no more pending writes. The + * Promise will never be rejected. Calling this method will send all requests. + * The promise resolves immediately if there are no pending writes. + * + * @return {Promise} A promise that resolves when all enqueued writes + * up to this point have been committed. + * + * @example + * ``` + * let bulkWriter = firestore.bulkWriter(); + * + * bulkWriter.create(documentRef, {foo: 'bar'}); + * bulkWriter.update(documentRef2, {foo: 'bar'}); + * bulkWriter.delete(documentRef3); + * await close().then(() => { + * console.log('Executed all writes'); + * }); + * ``` + */ + close() { + this._verifyNotClosed(); + this.firestore._decrementBulkWritersCount(); + const flushPromise = this.flush(); + this._closing = true; + return flushPromise; + } + /** + * Throws an error if the BulkWriter instance has been closed. + * @private + * @internal + */ + _verifyNotClosed() { + if (this._closing) { + throw new Error('BulkWriter has already been closed.'); + } + } + /** + * Sends the current batch and resets `this._bulkCommitBatch`. + * + * @param flush If provided, keeps re-sending operations until no more + * operations are enqueued. This allows retries to resolve as part of a + * `flush()` or `close()` call. + * @private + * @internal + */ + _scheduleCurrentBatch(flush = false) { + if (this._bulkCommitBatch._opCount === 0) + return; + const pendingBatch = this._bulkCommitBatch; + this._bulkCommitBatch = new BulkCommitBatch(this.firestore, this._maxBatchSize); + // Use the write with the longest backoff duration when determining backoff. + const highestBackoffDuration = pendingBatch.pendingOps.reduce((prev, cur) => (prev.backoffDuration > cur.backoffDuration ? prev : cur)).backoffDuration; + const backoffMsWithJitter = BulkWriter._applyJitter(highestBackoffDuration); + const delayedExecution = new util_1.Deferred(); + if (backoffMsWithJitter > 0) { + (0, backoff_1.delayExecution)(() => delayedExecution.resolve(), backoffMsWithJitter); + } + else { + delayedExecution.resolve(); + } + delayedExecution.promise.then(() => this._sendBatch(pendingBatch, flush)); + } + /** + * Sends the provided batch once the rate limiter does not require any delay. 
+ * @private + * @internal + */ + async _sendBatch(batch, flush = false) { + const tag = (0, util_1.requestTag)(); + // Send the batch if it is does not require any delay, or schedule another + // attempt after the appropriate timeout. + const underRateLimit = this._rateLimiter.tryMakeRequest(batch._opCount); + if (underRateLimit) { + await batch.bulkCommit({ requestTag: tag }); + if (flush) + this._scheduleCurrentBatch(flush); + } + else { + const delayMs = this._rateLimiter.getNextRequestDelayMs(batch._opCount); + (0, logger_1.logger)('BulkWriter._sendBatch', tag, `Backing off for ${delayMs} seconds`); + (0, backoff_1.delayExecution)(() => this._sendBatch(batch, flush), delayMs); + } + } + /** + * Adds a 30% jitter to the provided backoff. + * + * @private + * @internal + */ + static _applyJitter(backoffMs) { + if (backoffMs === 0) + return 0; + // Random value in [-0.3, 0.3]. + const jitter = exports.DEFAULT_JITTER_FACTOR * (Math.random() * 2 - 1); + return Math.min(backoff_1.DEFAULT_BACKOFF_MAX_DELAY_MS, backoffMs + jitter * backoffMs); + } + /** + * Schedules and runs the provided operation on the next available batch. + * @private + * @internal + */ + _enqueue(ref, type, enqueueOnBatchCallback) { + const bulkWriterOp = new BulkWriterOperation(ref, type, this._sendFn.bind(this, enqueueOnBatchCallback), this._errorFn.bind(this), this._successFn.bind(this)); + // Swallow the error if the developer has set an error listener. This + // prevents UnhandledPromiseRejections from being thrown if a floating + // BulkWriter operation promise fails when an error handler is specified. + // + // This is done here in order to chain the caught promise onto `lastOp`, + // which ensures that flush() resolves after the operation promise. + const userPromise = bulkWriterOp.promise.catch(err => { + if (!this._errorHandlerSet) { + throw err; + } + else { + return bulkWriterOp.promise; + } + }); + // Advance the `_lastOp` pointer. This ensures that `_lastOp` only resolves + // when both the previous and the current write resolve. + this._lastOp = this._lastOp.then(() => (0, util_1.silencePromise)(userPromise)); + // Schedule the operation if the BulkWriter has fewer than the maximum + // number of allowed pending operations, or add the operation to the + // buffer. + if (this._pendingOpsCount < this._maxPendingOpCount) { + this._pendingOpsCount++; + this._sendFn(enqueueOnBatchCallback, bulkWriterOp); + } + else { + this._bufferedOperations.push(new BufferedOperation(bulkWriterOp, () => { + this._pendingOpsCount++; + this._sendFn(enqueueOnBatchCallback, bulkWriterOp); + })); + } + // Chain the BulkWriter operation promise with the buffer processing logic + // in order to ensure that it runs and that subsequent operations are + // enqueued before the next batch is scheduled in `_sendBatch()`. + return userPromise + .then(res => { + this._pendingOpsCount--; + this._processBufferedOps(); + return res; + }) + .catch(err => { + this._pendingOpsCount--; + this._processBufferedOps(); + throw err; + }); + } + /** + * Manages the pending operation counter and schedules the next BulkWriter + * operation if we're under the maximum limit. + * @private + * @internal + */ + _processBufferedOps() { + if (this._pendingOpsCount < this._maxPendingOpCount && + this._bufferedOperations.length > 0) { + const nextOp = this._bufferedOperations.shift(); + nextOp.sendFn(); + } + } + /** + * Schedules the provided operations on current BulkCommitBatch. + * Sends the BulkCommitBatch if it reaches maximum capacity. 
+ * + * @private + * @internal + */ + _sendFn(enqueueOnBatchCallback, op) { + // A backoff duration greater than 0 implies that this batch is a retry. + // Retried writes are sent with a batch size of 10 in order to guarantee + // that the batch is under the 10MiB limit. + if (op.backoffDuration > 0) { + if (this._bulkCommitBatch.pendingOps.length >= exports.RETRY_MAX_BATCH_SIZE) { + this._scheduleCurrentBatch(/* flush= */ false); + } + this._bulkCommitBatch.setMaxBatchSize(exports.RETRY_MAX_BATCH_SIZE); + } + if (this._bulkCommitBatch.has(op.ref)) { + // Create a new batch since the backend doesn't support batches with two + // writes to the same document. + this._scheduleCurrentBatch(); + } + enqueueOnBatchCallback(this._bulkCommitBatch); + this._bulkCommitBatch.processLastOperation(op); + if (this._bulkCommitBatch._opCount === this._bulkCommitBatch.maxBatchSize) { + this._scheduleCurrentBatch(); + } + else if (op.flushed) { + // If flush() was called before this operation was enqueued into a batch, + // we still need to schedule it. + this._scheduleCurrentBatch(/* flush= */ true); + } + } +} +exports.BulkWriter = BulkWriter; +/** + * Validates the use of 'value' as BulkWriterOptions. + * + * @private + * @internal + * @param value The BulkWriterOptions object to validate. + * @throws if the input is not a valid BulkWriterOptions object. + */ +function validateBulkWriterOptions(value) { + if ((0, validate_1.validateOptional)(value, { optional: true })) { + return; + } + const argName = 'options'; + if (!(0, util_1.isObject)(value)) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(argName, 'bulkWriter() options argument')} Input is not an object.`); + } + const options = value; + if (options.throttling === undefined || + typeof options.throttling === 'boolean') { + return; + } + if (options.throttling.initialOpsPerSecond !== undefined) { + (0, validate_1.validateInteger)('initialOpsPerSecond', options.throttling.initialOpsPerSecond, { + minValue: 1, + }); + } + if (options.throttling.maxOpsPerSecond !== undefined) { + (0, validate_1.validateInteger)('maxOpsPerSecond', options.throttling.maxOpsPerSecond, { + minValue: 1, + }); + if (options.throttling.initialOpsPerSecond !== undefined && + options.throttling.initialOpsPerSecond > + options.throttling.maxOpsPerSecond) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(argName, 'bulkWriter() options argument')} "maxOpsPerSecond" cannot be less than "initialOpsPerSecond".`); + } + } +} +//# sourceMappingURL=bulk-writer.js.map -// Copyright (c) Microsoft Corporation. -function isTracingDisabled() { - var _a; - if (typeof process === "undefined") { - // not supported in browser for now without polyfills - return false; +/***/ }), + +/***/ 71550: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BundleBuilder = void 0; +const document_1 = __nccwpck_require__(98912); +const query_snapshot_1 = __nccwpck_require__(81796); +const timestamp_1 = __nccwpck_require__(29061); +const validate_1 = __nccwpck_require__(33822); +const BUNDLE_VERSION = 1; +/** + * Builds a Firestore data bundle with results from the given document and query snapshots. + */ +class BundleBuilder { + constructor(bundleId) { + this.bundleId = bundleId; + // Resulting documents for the bundle, keyed by full document path. + this.documents = new Map(); + // Named queries saved in the bundle, keyed by query name. + this.namedQueries = new Map(); + // The latest read time among all bundled documents and queries. + this.latestReadTime = new timestamp_1.Timestamp(0, 0); } - const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { - return false; + /** + * Adds a Firestore document snapshot or query snapshot to the bundle. + * Both the documents data and the query read time will be included in the bundle. + * + * @param {DocumentSnapshot | string} documentOrName A document snapshot to add or a name of a query. + * @param {Query=} querySnapshot A query snapshot to add to the bundle, if provided. + * @returns {BundleBuilder} This instance. + * + * @example + * ``` + * const bundle = firestore.bundle('data-bundle'); + * const docSnapshot = await firestore.doc('abc/123').get(); + * const querySnapshot = await firestore.collection('coll').get(); + * + * const bundleBuffer = bundle.add(docSnapshot) // Add a document + * .add('coll-query', querySnapshot) // Add a named query. + * .build() + * // Save `bundleBuffer` to CDN or stream it to clients. + * ``` + */ + add(documentOrName, querySnapshot) { + // eslint-disable-next-line prefer-rest-params + (0, validate_1.validateMinNumberOfArguments)('BundleBuilder.add', arguments, 1); + // eslint-disable-next-line prefer-rest-params + (0, validate_1.validateMaxNumberOfArguments)('BundleBuilder.add', arguments, 2); + if (arguments.length === 1) { + validateDocumentSnapshot('documentOrName', documentOrName); + this.addBundledDocument(documentOrName); + } + else { + (0, validate_1.validateString)('documentOrName', documentOrName); + validateQuerySnapshot('querySnapshot', querySnapshot); + this.addNamedQuery(documentOrName, querySnapshot); + } + return this; + } + addBundledDocument(snap, queryName) { + const originalDocument = this.documents.get(snap.ref.path); + const originalQueries = originalDocument === null || originalDocument === void 0 ? void 0 : originalDocument.metadata.queries; + // Update with document built from `snap` because it is newer. + if (!originalDocument || + timestamp_1.Timestamp.fromProto(originalDocument.metadata.readTime) < snap.readTime) { + const docProto = snap.toDocumentProto(); + this.documents.set(snap.ref.path, { + document: snap.exists ? docProto : undefined, + metadata: { + name: docProto.name, + readTime: snap.readTime.toProto().timestampValue, + exists: snap.exists, + }, + }); + } + // Update `queries` to include both original and `queryName`. 
+ const newDocument = this.documents.get(snap.ref.path); + newDocument.metadata.queries = originalQueries || []; + if (queryName) { + newDocument.metadata.queries.push(queryName); + } + if (snap.readTime > this.latestReadTime) { + this.latestReadTime = snap.readTime; + } + } + addNamedQuery(name, querySnap) { + if (this.namedQueries.has(name)) { + throw new Error(`Query name conflict: ${name} has already been added.`); + } + this.namedQueries.set(name, { + name, + bundledQuery: querySnap.query._toBundledQuery(), + readTime: querySnap.readTime.toProto().timestampValue, + }); + for (const snap of querySnap.docs) { + this.addBundledDocument(snap, name); + } + if (querySnap.readTime > this.latestReadTime) { + this.latestReadTime = querySnap.readTime; + } + } + /** + * Converts a IBundleElement to a Buffer whose content is the length prefixed JSON representation + * of the element. + * @private + * @internal + */ + elementToLengthPrefixedBuffer(bundleElement) { + // Convert to a valid proto message object then take its JSON representation. + // This take cares of stuff like converting internal byte array fields + // to Base64 encodings. + // We lazy-load the Proto file to reduce cold-start times. + const message = (__nccwpck_require__(14079).firestore.BundleElement.fromObject)(bundleElement) + .toJSON(); + const buffer = Buffer.from(JSON.stringify(message), 'utf-8'); + const lengthBuffer = Buffer.from(buffer.length.toString()); + return Buffer.concat([lengthBuffer, buffer]); + } + build() { + let bundleBuffer = Buffer.alloc(0); + for (const namedQuery of this.namedQueries.values()) { + bundleBuffer = Buffer.concat([ + bundleBuffer, + this.elementToLengthPrefixedBuffer({ namedQuery }), + ]); + } + for (const bundledDocument of this.documents.values()) { + const documentMetadata = bundledDocument.metadata; + bundleBuffer = Buffer.concat([ + bundleBuffer, + this.elementToLengthPrefixedBuffer({ documentMetadata }), + ]); + // Write to the bundle if document exists. + const document = bundledDocument.document; + if (document) { + bundleBuffer = Buffer.concat([ + bundleBuffer, + this.elementToLengthPrefixedBuffer({ document }), + ]); + } + } + const metadata = { + id: this.bundleId, + createTime: this.latestReadTime.toProto().timestampValue, + version: BUNDLE_VERSION, + totalDocuments: this.documents.size, + totalBytes: bundleBuffer.length, + }; + // Prepends the metadata element to the bundleBuffer: `bundleBuffer` is the second argument to `Buffer.concat`. + bundleBuffer = Buffer.concat([ + this.elementToLengthPrefixedBuffer({ metadata }), + bundleBuffer, + ]); + return bundleBuffer; + } +} +exports.BundleBuilder = BundleBuilder; +/** + * Convenient class to hold both the metadata and the actual content of a document to be bundled. + * @private + * @internal + */ +class BundledDocument { + constructor(metadata, document) { + this.metadata = metadata; + this.document = document; } - return Boolean(azureTracingDisabledValue); } /** - * Creates a function that can be used to create spans using the global tracer. + * Validates that 'value' is DocumentSnapshot. * - * Usage: + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + */ +function validateDocumentSnapshot(arg, value) { + if (!(value instanceof document_1.DocumentSnapshot)) { + throw new Error((0, validate_1.invalidArgumentMessage)(arg, 'DocumentSnapshot')); + } +} +/** + * Validates that 'value' is QuerySnapshot. 
* - * ```typescript - * // once - * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + */ +function validateQuerySnapshot(arg, value) { + if (!(value instanceof query_snapshot_1.QuerySnapshot)) { + throw new Error((0, validate_1.invalidArgumentMessage)(arg, 'QuerySnapshot')); + } +} +//# sourceMappingURL=bundle.js.map + +/***/ }), + +/***/ 85391: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2020 Google LLC * - * // in each operation - * const span = createSpan("deleteConfigurationSetting", operationOptions); - * // code... - * span.end(); - * ``` + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at * - * @hidden - * @param args - allows configuration of the prefix for each span as well as the az.namespace field. - */ -function createSpanFunction(args) { - return function (operationName, operationOptions) { - const tracer = getTracer(); - const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; - const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); - const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; - let span; - if (isTracingDisabled()) { - span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CollectionGroup = void 0; +const query_partition_1 = __nccwpck_require__(88357); +const util_1 = __nccwpck_require__(15468); +const logger_1 = __nccwpck_require__(42718); +const query_1 = __nccwpck_require__(38621); +const query_options_1 = __nccwpck_require__(47188); +const path_1 = __nccwpck_require__(34908); +const validate_1 = __nccwpck_require__(33822); +const types_1 = __nccwpck_require__(75371); +const order_1 = __nccwpck_require__(66849); +const trace_util_1 = __nccwpck_require__(2693); +/** + * A `CollectionGroup` refers to all documents that are contained in a + * collection or subcollection with a specific collection ID. + * + * @class CollectionGroup + */ +class CollectionGroup extends query_1.Query { + /** @private */ + constructor(firestore, collectionId, converter) { + super(firestore, query_options_1.QueryOptions.forCollectionGroupQuery(collectionId, converter)); + } + /** + * Partitions a query by returning partition cursors that can be used to run + * the query in parallel. The returned cursors are split points that can be + * used as starting and end points for individual query invocations. 
+ * + * @example + * ``` + * const query = firestore.collectionGroup('collectionId'); + * for await (const partition of query.getPartitions(42)) { + * const partitionedQuery = partition.toQuery(); + * const querySnapshot = await partitionedQuery.get(); + * console.log(`Partition contained ${querySnapshot.length} documents`); + * } + * + * ``` + * @param {number} desiredPartitionCount The desired maximum number of + * partition points. The number must be strictly positive. The actual number + * of partitions returned may be fewer. + * @return {AsyncIterable} An AsyncIterable of + * `QueryPartition`s. + */ + async *getPartitions(desiredPartitionCount) { + const partitions = []; + await this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_PARTITION_QUERY, async () => { + var _a; + (0, validate_1.validateInteger)('desiredPartitionCount', desiredPartitionCount, { + minValue: 1, + }); + const tag = (0, util_1.requestTag)(); + await this.firestore.initializeIfNeeded(tag); + if (desiredPartitionCount > 1) { + // Partition queries require explicit ordering by __name__. + const queryWithDefaultOrder = this.orderBy(path_1.FieldPath.documentId()); + const request = queryWithDefaultOrder.toProto(); + // Since we are always returning an extra partition (with an empty endBefore + // cursor), we reduce the desired partition count by one. + request.partitionCount = desiredPartitionCount - 1; + const stream = await this.firestore.requestStream('partitionQueryStream', + /* bidirectional= */ false, request, tag); + stream.resume(); + for await (const currentCursor of stream) { + partitions.push((_a = currentCursor.values) !== null && _a !== void 0 ? _a : []); + } + } + (0, logger_1.logger)('Firestore.getPartitions', tag, 'Received %d partitions', partitions.length); + // Sort the partitions as they may not be ordered if responses are paged. + partitions.sort((l, r) => (0, order_1.compareArrays)(l, r)); + }); + for (let i = 0; i < partitions.length; ++i) { + yield new query_partition_1.QueryPartition(this._firestore, this._queryOptions.collectionId, this._queryOptions.converter, i > 0 ? partitions[i - 1] : undefined, partitions[i]); } - else { - span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); + // Return the extra partition with the empty cursor. + yield new query_partition_1.QueryPartition(this._firestore, this._queryOptions.collectionId, this._queryOptions.converter, partitions.pop(), undefined); + } + withConverter(converter) { + return new CollectionGroup(this.firestore, this._queryOptions.collectionId, converter !== null && converter !== void 0 ? converter : (0, types_1.defaultConverter)()); + } +} +exports.CollectionGroup = CollectionGroup; +//# sourceMappingURL=collection-group.js.map + +/***/ }), + +/***/ 36674: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.timestampFromJson = timestampFromJson; +exports.detectValueType = detectValueType; +exports.detectGoogleProtobufValueType = detectGoogleProtobufValueType; +exports.valueFromJson = valueFromJson; +exports.fieldsFromJson = fieldsFromJson; +const validate_1 = __nccwpck_require__(33822); +const map_type_1 = __nccwpck_require__(16723); +/*! + * @module firestore/convert + * @private + * @internal + * + * This module contains utility functions to convert + * `firestore.v1.Documents` from Proto3 JSON to their equivalent + * representation in Protobuf JS. Protobuf JS is the only encoding supported by + * this client, and dependencies that use Proto3 JSON (such as the Google Cloud + * Functions SDK) are supported through this conversion and its usage in + * {@see Firestore#snapshot_}. + */ +/** + * Converts an ISO 8601 or google.protobuf.Timestamp proto into Protobuf JS. + * + * @private + * @internal + * @param timestampValue The value to convert. + * @param argumentName The argument name to use in the error message if the + * conversion fails. If omitted, 'timestampValue' is used. + * @return The value as expected by Protobuf JS or undefined if no input was + * provided. + */ +function timestampFromJson(timestampValue, argumentName) { + let timestampProto = {}; + if (typeof timestampValue === 'string') { + const date = new Date(timestampValue); + const seconds = Math.floor(date.getTime() / 1000); + let nanos = 0; + if (timestampValue.length > 20) { + const nanoString = timestampValue.substring(20, timestampValue.length - 1); + const trailingZeroes = 9 - nanoString.length; + nanos = Number(nanoString) * Math.pow(10, trailingZeroes); + } + if (isNaN(seconds) || isNaN(nanos)) { + argumentName = argumentName || 'timestampValue'; + throw new Error(`Specify a valid ISO 8601 timestamp for "${argumentName}".`); + } + timestampProto = { + seconds: seconds || undefined, + nanos: nanos || undefined, + }; + } + else if (timestampValue !== undefined) { + (0, validate_1.validateObject)('timestampValue', timestampValue); + timestampProto = { + seconds: timestampValue.seconds || undefined, + nanos: timestampValue.nanos || undefined, + }; + } + return timestampProto; +} +/** + * Converts a Proto3 JSON 'bytesValue' field into Protobuf JS. + * + * @private + * @internal + * @param bytesValue The value to convert. + * @return The value as expected by Protobuf JS. + */ +function bytesFromJson(bytesValue) { + if (typeof bytesValue === 'string') { + return Buffer.from(bytesValue, 'base64'); + } + else { + return bytesValue; + } +} +/** + * Detects 'valueType' from a Proto3 JSON `firestore.v1.Value` proto. + * + * @private + * @internal + * @param proto The `firestore.v1.Value` proto. + * @return The string value for 'valueType'. 
+ */ +function detectValueType(proto) { + var _a; + let valueType; + if (proto.valueType) { + valueType = proto.valueType; + } + else { + const detectedValues = []; + if (proto.stringValue !== undefined) { + detectedValues.push('stringValue'); } - if (args.namespace) { - span.setAttribute("az.namespace", args.namespace); + if (proto.booleanValue !== undefined) { + detectedValues.push('booleanValue'); } - let newSpanOptions = tracingOptions.spanOptions || {}; - if (span.isRecording() && args.namespace) { - newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + if (proto.integerValue !== undefined) { + detectedValues.push('integerValue'); } - const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); - const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); - return { - span, - updatedOptions: newOperationOptions - }; - }; + if (proto.doubleValue !== undefined) { + detectedValues.push('doubleValue'); + } + if (proto.timestampValue !== undefined) { + detectedValues.push('timestampValue'); + } + if (proto.referenceValue !== undefined) { + detectedValues.push('referenceValue'); + } + if (proto.arrayValue !== undefined) { + detectedValues.push('arrayValue'); + } + if (proto.nullValue !== undefined) { + detectedValues.push('nullValue'); + } + if (proto.mapValue !== undefined) { + detectedValues.push('mapValue'); + } + if (proto.geoPointValue !== undefined) { + detectedValues.push('geoPointValue'); + } + if (proto.bytesValue !== undefined) { + detectedValues.push('bytesValue'); + } + if (detectedValues.length !== 1) { + throw new Error(`Unable to infer type value from '${JSON.stringify(proto)}'.`); + } + valueType = detectedValues[0]; + } + // Special handling of mapValues used to represent other data types + if (valueType === 'mapValue') { + const fields = (_a = proto.mapValue) === null || _a === void 0 ? void 0 : _a.fields; + if (fields) { + const props = Object.keys(fields); + if (props.indexOf(map_type_1.RESERVED_MAP_KEY) !== -1 && + detectValueType(fields[map_type_1.RESERVED_MAP_KEY]) === 'stringValue' && + fields[map_type_1.RESERVED_MAP_KEY].stringValue === map_type_1.RESERVED_MAP_KEY_VECTOR_VALUE) { + valueType = 'vectorValue'; + } + } + } + return valueType; } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const VERSION = "00"; /** - * Generates a `SpanContext` given a `traceparent` header value. - * @param traceParent - Serialized span context data as a `traceparent` header value. - * @returns The `SpanContext` generated from the `traceparent` value. + * Detects the value kind from a Proto3 JSON `google.protobuf.Value` proto. + * + * @private + * @internal + * @param proto The `firestore.v1.Value` proto. + * @return The string value for 'valueType'. 
*/ -function extractSpanContextFromTraceParentHeader(traceParentHeader) { - const parts = traceParentHeader.split("-"); - if (parts.length !== 4) { - return; +function detectGoogleProtobufValueType(proto) { + const detectedValues = []; + if (proto.nullValue !== undefined) { + detectedValues.push('nullValue'); } - const [version, traceId, spanId, traceOptions] = parts; - if (version !== VERSION) { - return; + if (proto.numberValue !== undefined) { + detectedValues.push('numberValue'); } - const traceFlags = parseInt(traceOptions, 16); - const spanContext = { - spanId, - traceId, - traceFlags - }; - return spanContext; + if (proto.stringValue !== undefined) { + detectedValues.push('stringValue'); + } + if (proto.boolValue !== undefined) { + detectedValues.push('boolValue'); + } + if (proto.structValue !== undefined) { + detectedValues.push('structValue'); + } + if (proto.listValue !== undefined) { + detectedValues.push('listValue'); + } + if (detectedValues.length !== 1) { + throw new Error(`Unable to infer type value from '${JSON.stringify(proto)}'.`); + } + return detectedValues[0]; +} +/** + * Converts a `firestore.v1.Value` in Proto3 JSON encoding into the + * Protobuf JS format expected by this client. + * + * @private + * @internal + * @param fieldValue The `firestore.v1.Value` in Proto3 JSON format. + * @return The `firestore.v1.Value` in Protobuf JS format. + */ +function valueFromJson(fieldValue) { + const valueType = detectValueType(fieldValue); + switch (valueType) { + case 'timestampValue': + return { + timestampValue: timestampFromJson(fieldValue.timestampValue), + }; + case 'bytesValue': + return { + bytesValue: bytesFromJson(fieldValue.bytesValue), + }; + case 'doubleValue': + return { + doubleValue: Number(fieldValue.doubleValue), + }; + case 'arrayValue': { + const arrayValue = []; + if (Array.isArray(fieldValue.arrayValue.values)) { + for (const value of fieldValue.arrayValue.values) { + arrayValue.push(valueFromJson(value)); + } + } + return { + arrayValue: { + values: arrayValue, + }, + }; + } + case 'mapValue': + case 'vectorValue': { + const mapValue = {}; + const fields = fieldValue.mapValue.fields; + if (fields) { + for (const prop of Object.keys(fields)) { + mapValue[prop] = valueFromJson(fieldValue.mapValue.fields[prop]); + } + } + return { + mapValue: { + fields: mapValue, + }, + }; + } + default: + return fieldValue; + } +} +/** + * Converts a map of IValues in Proto3 JSON encoding into the Protobuf JS format + * expected by this client. This conversion creates a copy of the underlying + * fields. + * + * @private + * @internal + * @param document An object with IValues in Proto3 JSON format. + * @return The object in Protobuf JS format. + */ +function fieldsFromJson(document) { + const result = {}; + for (const prop of Object.keys(document)) { + result[prop] = valueFromJson(document[prop]); + } + return result; } +//# sourceMappingURL=convert.js.map + +/***/ }), + +/***/ 62270: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/*! + * Copyright 2018 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DocumentChange = void 0; /** - * Generates a `traceparent` value given a span context. - * @param spanContext - Contains context for a specific span. - * @returns The `spanContext` represented as a `traceparent` value. + * A DocumentChange represents a change to the documents matching a query. + * It contains the document affected and the type of change that occurred. + * + * @class DocumentChange */ -function getTraceParentHeader(spanContext) { - const missingFields = []; - if (!spanContext.traceId) { - missingFields.push("traceId"); +class DocumentChange { + /** + * @private + * + * @param {string} type 'added' | 'removed' | 'modified'. + * @param {QueryDocumentSnapshot} document The document. + * @param {number} oldIndex The index in the documents array prior to this + * change. + * @param {number} newIndex The index in the documents array after this + * change. + */ + constructor(type, document, oldIndex, newIndex) { + this._type = type; + this._document = document; + this._oldIndex = oldIndex; + this._newIndex = newIndex; } - if (!spanContext.spanId) { - missingFields.push("spanId"); + /** + * The type of change ('added', 'modified', or 'removed'). + * + * @type {string} + * @name DocumentChange#type + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * let docsArray = []; + * + * let unsubscribe = query.onSnapshot(querySnapshot => { + * for (let change of querySnapshot.docChanges) { + * console.log(`Type of change is ${change.type}`); + * } + * }); + * + * // Remove this listener. + * unsubscribe(); + * ``` + */ + get type() { + return this._type; } - if (missingFields.length) { - return; + /** + * The document affected by this change. + * + * @type {QueryDocumentSnapshot} + * @name DocumentChange#doc + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * let unsubscribe = query.onSnapshot(querySnapshot => { + * for (let change of querySnapshot.docChanges) { + * console.log(change.doc.data()); + * } + * }); + * + * // Remove this listener. + * unsubscribe(); + * ``` + */ + get doc() { + return this._document; + } + /** + * The index of the changed document in the result set immediately prior to + * this DocumentChange (i.e. supposing that all prior DocumentChange objects + * have been applied). Is -1 for 'added' events. + * + * @type {number} + * @name DocumentChange#oldIndex + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * let docsArray = []; + * + * let unsubscribe = query.onSnapshot(querySnapshot => { + * for (let change of querySnapshot.docChanges) { + * if (change.oldIndex !== -1) { + * docsArray.splice(change.oldIndex, 1); + * } + * if (change.newIndex !== -1) { + * docsArray.splice(change.newIndex, 0, change.doc); + * } + * } + * }); + * + * // Remove this listener. + * unsubscribe(); + * ``` + */ + get oldIndex() { + return this._oldIndex; + } + /** + * The index of the changed document in the result set immediately after + * this DocumentChange (i.e. supposing that all prior DocumentChange + * objects and the current DocumentChange object have been applied). + * Is -1 for 'removed' events. 
+ * + * @type {number} + * @name DocumentChange#newIndex + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * let docsArray = []; + * + * let unsubscribe = query.onSnapshot(querySnapshot => { + * for (let change of querySnapshot.docChanges) { + * if (change.oldIndex !== -1) { + * docsArray.splice(change.oldIndex, 1); + * } + * if (change.newIndex !== -1) { + * docsArray.splice(change.newIndex, 0, change.doc); + * } + * } + * }); + * + * // Remove this listener. + * unsubscribe(); + * ``` + */ + get newIndex() { + return this._newIndex; + } + /** + * Returns true if the data in this `DocumentChange` is equal to the provided + * value. + * + * @param {*} other The value to compare against. + * @return true if this `DocumentChange` is equal to the provided value. + */ + isEqual(other) { + if (this === other) { + return true; + } + return (other instanceof DocumentChange && + this._type === other._type && + this._oldIndex === other._oldIndex && + this._newIndex === other._newIndex && + this._document.isEqual(other._document)); } - const flags = spanContext.traceFlags || 0 /* NONE */; - const hexFlags = flags.toString(16); - const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; - // https://www.w3.org/TR/trace-context/#traceparent-header-field-values - return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; } +exports.DocumentChange = DocumentChange; +//# sourceMappingURL=document-change.js.map -exports.context = context; -exports.createSpanFunction = createSpanFunction; -exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; -exports.getSpan = getSpan; -exports.getSpanContext = getSpanContext; -exports.getTraceParentHeader = getTraceParentHeader; -exports.getTracer = getTracer; -exports.isSpanContextValid = isSpanContextValid; -exports.setSpan = setSpan; -exports.setSpanContext = setSpanContext; -//# sourceMappingURL=index.js.map +/***/ }), + +/***/ 81080: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +/*! + * Copyright 2021 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DocumentReader = void 0; +const document_1 = __nccwpck_require__(98912); +const util_1 = __nccwpck_require__(15468); +const logger_1 = __nccwpck_require__(42718); +const timestamp_1 = __nccwpck_require__(29061); +/** + * A wrapper around BatchGetDocumentsRequest that retries request upon stream + * failure and returns ordered results. + * + * @private + * @internal + */ +class DocumentReader { + /** + * Creates a new DocumentReader that fetches the provided documents (via + * `get()`). + * + * @param firestore The Firestore instance to use. + * @param allDocuments The documents to get. 
+ * @param fieldMask An optional field mask to apply to this read + * @param transactionOrReadTime An optional transaction ID to use for this + * read or options for beginning a new transaction with this read + */ + constructor(firestore, allDocuments, fieldMask, transactionOrReadTime) { + this.firestore = firestore; + this.allDocuments = allDocuments; + this.fieldMask = fieldMask; + this.transactionOrReadTime = transactionOrReadTime; + this.outstandingDocuments = new Set(); + this.retrievedDocuments = new Map(); + for (const docRef of this.allDocuments) { + this.outstandingDocuments.add(docRef.formattedName); + } + } + /** + * Invokes the BatchGetDocuments RPC and returns the results as an array of + * documents. + * + * @param requestTag A unique client-assigned identifier for this request. + */ + async get(requestTag) { + const { result } = await this._get(requestTag); + return result; + } + /** + * Invokes the BatchGetDocuments RPC and returns the results with transaction + * metadata. + * + * @param requestTag A unique client-assigned identifier for this request. + */ + async _get(requestTag) { + await this.fetchDocuments(requestTag); + // BatchGetDocuments doesn't preserve document order. We use the request + // order to sort the resulting documents. + const orderedDocuments = []; + for (const docRef of this.allDocuments) { + const document = this.retrievedDocuments.get(docRef.formattedName); + if (document !== undefined) { + // Recreate the DocumentSnapshot with the DocumentReference + // containing the original converter. + const finalDoc = new document_1.DocumentSnapshotBuilder(docRef); + finalDoc.fieldsProto = document._fieldsProto; + finalDoc.readTime = document.readTime; + finalDoc.createTime = document.createTime; + finalDoc.updateTime = document.updateTime; + orderedDocuments.push(finalDoc.build()); + } + else { + throw new Error(`Did not receive document for "${docRef.path}".`); + } + } + return { + result: orderedDocuments, + transaction: this.retrievedTransactionId, + }; + } + async fetchDocuments(requestTag) { + var _a; + if (!this.outstandingDocuments.size) { + return; + } + const request = { + database: this.firestore.formattedName, + documents: Array.from(this.outstandingDocuments), + }; + if (this.transactionOrReadTime instanceof Uint8Array) { + request.transaction = this.transactionOrReadTime; + } + else if (this.transactionOrReadTime instanceof timestamp_1.Timestamp) { + request.readTime = this.transactionOrReadTime.toProto().timestampValue; + } + else if (this.transactionOrReadTime) { + request.newTransaction = this.transactionOrReadTime; + } + if (this.fieldMask) { + const fieldPaths = this.fieldMask.map(fieldPath => fieldPath.formattedName); + request.mask = { fieldPaths }; + } + let resultCount = 0; + try { + const stream = await this.firestore.requestStream('batchGetDocuments', + /* bidirectional= */ false, request, requestTag); + stream.resume(); + for await (const response of stream) { + // Proto comes with zero-length buffer by default + if ((_a = response.transaction) === null || _a === void 0 ? 
void 0 : _a.length) { + this.retrievedTransactionId = response.transaction; + } + let snapshot; + if (response.found) { + (0, logger_1.logger)('DocumentReader.fetchDocuments', requestTag, 'Received document: %s', response.found.name); + snapshot = this.firestore.snapshot_(response.found, response.readTime); + } + else if (response.missing) { + (0, logger_1.logger)('DocumentReader.fetchDocuments', requestTag, 'Document missing: %s', response.missing); + snapshot = this.firestore.snapshot_(response.missing, response.readTime); + } + if (snapshot) { + const path = snapshot.ref.formattedName; + this.outstandingDocuments.delete(path); + this.retrievedDocuments.set(path, snapshot); + ++resultCount; + } + } + } + catch (error) { + const shouldRetry = + // Transactional reads are retried via the transaction runner. + !request.transaction && + !request.newTransaction && + // Only retry if we made progress. + resultCount > 0 && + // Don't retry permanent errors. + error.code !== undefined && + !(0, util_1.isPermanentRpcError)(error, 'batchGetDocuments'); + (0, logger_1.logger)('DocumentReader.fetchDocuments', requestTag, 'BatchGetDocuments failed with error: %s. Retrying: %s', error, shouldRetry); + if (shouldRetry) { + return this.fetchDocuments(requestTag); + } + else { + throw error; + } + } + finally { + (0, logger_1.logger)('DocumentReader.fetchDocuments', requestTag, 'Received %d results', resultCount); + } + } +} +exports.DocumentReader = DocumentReader; +//# sourceMappingURL=document-reader.js.map /***/ }), -/***/ 70890: -/***/ ((module) => { - -/****************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, Symbol, Reflect, Promise, SuppressedError */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __esDecorate; -var __runInitializers; -var __propKey; -var __setFunctionName; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __classPrivateFieldIn; -var __createBinding; -var __addDisposableResource; -var __disposeResources; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? 
this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { - function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } - var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; - var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; - var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); - var _, done = false; - for (var i = decorators.length - 1; i >= 0; i--) { - var context = {}; - for (var p in contextIn) context[p] = p === "access" ? 
{} : contextIn[p]; - for (var p in contextIn.access) context.access[p] = contextIn.access[p]; - context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); - if (kind === "accessor") { - if (result === void 0) continue; - if (result === null || typeof result !== "object") throw new TypeError("Object expected"); - if (_ = accept(result.get)) descriptor.get = _; - if (_ = accept(result.set)) descriptor.set = _; - if (_ = accept(result.init)) initializers.unshift(_); - } - else if (_ = accept(result)) { - if (kind === "field") initializers.unshift(_); - else descriptor[key] = _; - } - } - if (target) Object.defineProperty(target, contextIn.name, descriptor); - done = true; - }; - - __runInitializers = function (thisArg, initializers, value) { - var useValue = arguments.length > 2; - for (var i = 0; i < initializers.length; i++) { - value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); - } - return useValue ? value : void 0; - }; - - __propKey = function (x) { - return typeof x === "symbol" ? x : "".concat(x); - }; - - __setFunctionName = function (f, name, prefix) { - if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; - return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? 
(this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - __classPrivateFieldIn = function (state, receiver) { - if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); - return typeof state === "function" ? receiver === state : state.has(receiver); - }; - - __addDisposableResource = function (env, value, async) { - if (value !== null && value !== void 0) { - if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); - var dispose; - if (async) { - if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); - dispose = value[Symbol.asyncDispose]; - } - if (dispose === void 0) { - if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); - dispose = value[Symbol.dispose]; - } - if (typeof dispose !== "function") throw new TypeError("Object not disposable."); - env.stack.push({ value: value, dispose: dispose, async: async }); - } - else if (async) { - env.stack.push({ async: true }); - } - return value; - }; - - var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { - var e = new Error(message); - return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; - }; - - __disposeResources = function (env) { - function fail(e) { - env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; - env.hasError = true; - } - function next() { - while (env.stack.length) { - var rec = env.stack.pop(); - try { - var result = rec.dispose && rec.dispose.call(rec.value); - if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); - } - catch (e) { - fail(e); - } - } - if (env.hasError) throw env.error; - } - return next(); - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__esDecorate", __esDecorate); - exporter("__runInitializers", __runInitializers); - exporter("__propKey", __propKey); - exporter("__setFunctionName", __setFunctionName); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); - exporter("__classPrivateFieldIn", __classPrivateFieldIn); - exporter("__addDisposableResource", __addDisposableResource); - exporter("__disposeResources", __disposeResources); -}); - - -/***/ }), - -/***/ 40334: -/***/ ((module) => { - -"use strict"; - 
-var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - createTokenAuth: () => createTokenAuth -}); -module.exports = __toCommonJS(dist_src_exports); - -// pkg/dist-src/auth.js -var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -var REGEX_IS_INSTALLATION = /^ghs_/; -var REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; - return { - type: "token", - token, - tokenType - }; -} - -// pkg/dist-src/with-authorization-prefix.js -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - return `token ${token}`; -} - -// pkg/dist-src/hook.js -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge( - route, - parameters - ); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -// pkg/dist-src/index.js -var createTokenAuth = function createTokenAuth2(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - if (typeof token !== "string") { - throw new Error( - "[@octokit/auth-token] Token passed to createTokenAuth is not a string" - ); - } - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - - -/***/ }), - -/***/ 76762: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 98912: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; 
-__export(dist_src_exports, { - Octokit: () => Octokit -}); -module.exports = __toCommonJS(dist_src_exports); -var import_universal_user_agent = __nccwpck_require__(45030); -var import_before_after_hook = __nccwpck_require__(83682); -var import_request = __nccwpck_require__(36234); -var import_graphql = __nccwpck_require__(88467); -var import_auth_token = __nccwpck_require__(40334); - -// pkg/dist-src/version.js -var VERSION = "5.0.2"; - -// pkg/dist-src/index.js -var noop = () => { -}; -var consoleWarn = console.warn.bind(console); -var consoleError = console.error.bind(console); -var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; -var Octokit = class { - static { - this.VERSION = VERSION; - } - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - super( - Object.assign( - {}, - defaults, - options, - options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null - ) - ); - } - }; - return OctokitWithDefaults; - } - static { - this.plugins = []; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) - */ - static plugin(...newPlugins) { - const currentPlugins = this.plugins; - const NewOctokit = class extends this { - static { - this.plugins = currentPlugins.concat( - newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) - ); - } - }; - return NewOctokit; - } - constructor(options = {}) { - const hook = new import_before_after_hook.Collection(); - const requestDefaults = { - baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; - requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Precondition = exports.DocumentTransform = exports.DocumentMask = exports.QueryDocumentSnapshot = exports.DocumentSnapshot = exports.DocumentSnapshotBuilder = void 0; +const deepEqual = __nccwpck_require__(28206); +const assert = __nccwpck_require__(39491); +const field_value_1 = __nccwpck_require__(16888); +const path_1 = __nccwpck_require__(34908); +const document_reference_1 = __nccwpck_require__(502); +const types_1 = __nccwpck_require__(75371); +const util_1 = __nccwpck_require__(15468); +/** + * Returns a builder for DocumentSnapshot and QueryDocumentSnapshot instances. 
+ * Invoke `.build()' to assemble the final snapshot. + * + * @private + * @internal + */ +class DocumentSnapshotBuilder { + // We include the DocumentReference in the constructor in order to allow the + // DocumentSnapshotBuilder to be typed with when + // it is constructed. + constructor(ref) { + this.ref = ref; } - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; + /** + * Builds the DocumentSnapshot. + * + * @private + * @internal + * @returns Returns either a QueryDocumentSnapshot (if `fieldsProto` was + * provided) or a DocumentSnapshot. + */ + build() { + assert((this.fieldsProto !== undefined) === (this.createTime !== undefined), 'Create time should be set iff document exists.'); + assert((this.fieldsProto !== undefined) === (this.updateTime !== undefined), 'Update time should be set iff document exists.'); + return this.fieldsProto + ? new QueryDocumentSnapshot(this.ref, this.fieldsProto, this.readTime, this.createTime, this.updateTime) + : new DocumentSnapshot(this.ref, undefined, this.readTime); } - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; +} +exports.DocumentSnapshotBuilder = DocumentSnapshotBuilder; +/** + * A DocumentSnapshot is an immutable representation for a document in a + * Firestore database. The data can be extracted with + * [data()]{@link DocumentSnapshot#data} or + * [get(fieldPath)]{@link DocumentSnapshot#get} to get a + * specific field. + * + *
For a DocumentSnapshot that points to a non-existing document, any data + * access will return 'undefined'. You can use the + * [exists]{@link DocumentSnapshot#exists} property to explicitly verify a + * document's existence. + * + * @class DocumentSnapshot + */ +class DocumentSnapshot { + /** + * @private + * @internal + * + * @param ref The reference to the document. + * @param _fieldsProto The fields of the Firestore `Document` Protobuf backing + * this document (or undefined if the document does not exist). + * @param readTime The time when this snapshot was read (or undefined if + * the document exists only locally). + * @param createTime The time when the document was created (or undefined if + * the document does not exist). + * @param updateTime The time when the document was last updated (or undefined + * if the document does not exist). + */ + constructor(ref, + /** + * @internal + * @private + **/ + _fieldsProto, readTime, createTime, updateTime) { + this._fieldsProto = _fieldsProto; + this._ref = ref; + this._serializer = ref.firestore._serializer; + this._readTime = readTime; + this._createTime = createTime; + this._updateTime = updateTime; } - this.request = import_request.request.defaults(requestDefaults); - this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); - this.log = Object.assign( - { - debug: noop, - info: noop, - warn: consoleWarn, - error: consoleError - }, - options.log - ); - this.hook = hook; - if (!options.authStrategy) { - if (!options.auth) { - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - const auth = (0, import_auth_token.createTokenAuth)(options.auth); - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { authStrategy, ...otherOptions } = options; - const auth = authStrategy( - Object.assign( - { - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, - options.auth - ) - ); - hook.wrap("request", auth.hook); - this.auth = auth; + /** + * Creates a DocumentSnapshot from an object. + * + * @private + * @internal + * @param ref The reference to the document. + * @param obj The object to store in the DocumentSnapshot. + * @return The created DocumentSnapshot. + */ + static fromObject(ref, obj) { + const serializer = ref.firestore._serializer; + return new DocumentSnapshot(ref, serializer.encodeFields(obj)); } - const classConstructor = this.constructor; - for (let i = 0; i < classConstructor.plugins.length; ++i) { - Object.assign(this, classConstructor.plugins[i](this, options)); + /** + * Creates a DocumentSnapshot from an UpdateMap. + * + * This methods expands the top-level field paths in a JavaScript map and + * turns { foo.bar : foobar } into { foo { bar : foobar }} + * + * @private + * @internal + * @param ref The reference to the document. + * @param data The field/value map to expand. + * @return The created DocumentSnapshot. + */ + static fromUpdateMap(ref, data) { + const serializer = ref + .firestore._serializer; + /** + * Merges 'value' at the field path specified by the path array into + * 'target'. 
+ */ + function merge(target, value, path, pos) { + const key = path[pos]; + const isLast = pos === path.length - 1; + if (target[key] === undefined) { + if (isLast) { + if (value instanceof field_value_1.FieldTransform) { + // If there is already data at this path, we need to retain it. + // Otherwise, we don't include it in the DocumentSnapshot. + return !(0, util_1.isEmpty)(target) ? target : null; + } + // The merge is done. + const leafNode = serializer.encodeValue(value); + if (leafNode) { + target[key] = leafNode; + } + return target; + } + else { + // We need to expand the target object. + const childNode = { + mapValue: { + fields: {}, + }, + }; + const nestedValue = merge(childNode.mapValue.fields, value, path, pos + 1); + if (nestedValue) { + childNode.mapValue.fields = nestedValue; + target[key] = childNode; + return target; + } + else { + return !(0, util_1.isEmpty)(target) ? target : null; + } + } + } + else { + assert(!isLast, "Can't merge current value into a nested object"); + target[key].mapValue.fields = merge(target[key].mapValue.fields, value, path, pos + 1); + return target; + } + } + const res = {}; + for (const [key, value] of data) { + const path = key.toArray(); + merge(res, value, path, 0); + } + return new DocumentSnapshot(ref, res); } - } -}; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - - -/***/ }), - -/***/ 59440: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - endpoint: () => endpoint -}); -module.exports = __toCommonJS(dist_src_exports); - -// pkg/dist-src/defaults.js -var import_universal_user_agent = __nccwpck_require__(45030); - -// pkg/dist-src/version.js -var VERSION = "9.0.4"; - -// pkg/dist-src/defaults.js -var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; -var DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "" - } -}; - -// pkg/dist-src/util/lowercase-keys.js -function lowercaseKeys(object) { - if (!object) { - return {}; - } - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -// pkg/dist-src/util/is-plain-object.js -function isPlainObject(value) { - if (typeof value !== "object" || value === null) - return false; - if (Object.prototype.toString.call(value) !== "[object Object]") - return false; - const proto = Object.getPrototypeOf(value); - if (proto === null) - return true; - const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; - 
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); -} - -// pkg/dist-src/util/merge-deep.js -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach((key) => { - if (isPlainObject(options[key])) { - if (!(key in defaults)) - Object.assign(result, { [key]: options[key] }); - else - result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { [key]: options[key] }); + /** + * True if the document exists. + * + * @type {boolean} + * @name DocumentSnapshot#exists + * @readonly + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then((documentSnapshot) => { + * if (documentSnapshot.exists) { + * console.log(`Data: ${JSON.stringify(documentSnapshot.data())}`); + * } + * }); + * ``` + */ + get exists() { + return this._fieldsProto !== undefined; } - }); - return result; -} - -// pkg/dist-src/util/remove-undefined-properties.js -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === void 0) { - delete obj[key]; + /** + * A [DocumentReference]{@link DocumentReference} for the document + * stored in this snapshot. + * + * @type {DocumentReference} + * @name DocumentSnapshot#ref + * @readonly + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then((documentSnapshot) => { + * if (documentSnapshot.exists) { + * console.log(`Found document at '${documentSnapshot.ref.path}'`); + * } + * }); + * ``` + */ + get ref() { + return this._ref; } - } - return obj; -} - -// pkg/dist-src/merge.js -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { method, url } : { url: method }, options); - } else { - options = Object.assign({}, route); - } - options.headers = lowercaseKeys(options.headers); - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); - if (options.url === "/graphql") { - if (defaults && defaults.mediaType.previews?.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( - (preview) => !mergedOptions.mediaType.previews.includes(preview) - ).concat(mergedOptions.mediaType.previews); + /** + * The ID of the document for which this DocumentSnapshot contains data. + * + * @type {string} + * @name DocumentSnapshot#id + * @readonly + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then((documentSnapshot) => { + * if (documentSnapshot.exists) { + * console.log(`Document found with name '${documentSnapshot.id}'`); + * } + * }); + * ``` + */ + get id() { + return this._ref.id; } - mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); - } - return mergedOptions; -} - -// pkg/dist-src/util/add-query-parameters.js -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? "&" : "?"; - const names = Object.keys(parameters); - if (names.length === 0) { - return url; - } - return url + separator + names.map((name) => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + /** + * The time the document was created. Undefined for documents that don't + * exist. 
+ * + * @type {Timestamp|undefined} + * @name DocumentSnapshot#createTime + * @readonly + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(documentSnapshot => { + * if (documentSnapshot.exists) { + * let createTime = documentSnapshot.createTime; + * console.log(`Document created at '${createTime.toDate()}'`); + * } + * }); + * ``` + */ + get createTime() { + return this._createTime; } - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -// pkg/dist-src/util/extract-url-variable-names.js -var urlVariableRegex = /\{[^}]+\}/g; -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - if (!matches) { - return []; - } - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -// pkg/dist-src/util/omit.js -function omit(object, keysToOmit) { - const result = { __proto__: null }; - for (const key of Object.keys(object)) { - if (keysToOmit.indexOf(key) === -1) { - result[key] = object[key]; + /** + * The time the document was last updated (at the time the snapshot was + * generated). Undefined for documents that don't exist. + * + * @type {Timestamp|undefined} + * @name DocumentSnapshot#updateTime + * @readonly + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(documentSnapshot => { + * if (documentSnapshot.exists) { + * let updateTime = documentSnapshot.updateTime; + * console.log(`Document updated at '${updateTime.toDate()}'`); + * } + * }); + * ``` + */ + get updateTime() { + return this._updateTime; } - } - return result; -} - -// pkg/dist-src/util/url-template.js -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + /** + * The time this snapshot was read. + * + * @type {Timestamp} + * @name DocumentSnapshot#readTime + * @readonly + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(documentSnapshot => { + * let readTime = documentSnapshot.readTime; + * console.log(`Document read at '${readTime.toDate()}'`); + * }); + * ``` + */ + get readTime() { + if (this._readTime === undefined) { + throw new Error("Called 'readTime' on a local document"); + } + return this._readTime; } - return part; - }).join(""); -} -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} -function isDefined(value) { - return value !== void 0 && value !== null; -} -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} -function getValues(context, operator, key, modifier) { - var value = context[key], result = []; - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - result.push( - encodeValue(operator, value, isKeyOperator(operator) ? 
key : "") - ); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function(value2) { - result.push( - encodeValue(operator, value2, isKeyOperator(operator) ? key : "") - ); - }); - } else { - Object.keys(value).forEach(function(k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); + /** + * Retrieves all fields in the document as an object. Returns 'undefined' if + * the document doesn't exist. + * + * @returns {T|undefined} An object containing all fields in the document or + * 'undefined' if the document doesn't exist. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(documentSnapshot => { + * let data = documentSnapshot.data(); + * console.log(`Retrieved data: ${JSON.stringify(data)}`); + * }); + * ``` + */ + data() { + const fields = this._fieldsProto; + if (fields === undefined) { + return undefined; } - } else { - const tmp = []; - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function(value2) { - tmp.push(encodeValue(operator, value2)); - }); - } else { - Object.keys(value).forEach(function(k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); + // We only want to use the converter and create a new QueryDocumentSnapshot + // if a converter has been provided. + if (this.ref._converter !== (0, types_1.defaultConverter)()) { + const untypedReference = new document_reference_1.DocumentReference(this.ref.firestore, this.ref._path); + return this.ref._converter.fromFirestore(new QueryDocumentSnapshot(untypedReference, this._fieldsProto, this.readTime, this.createTime, this.updateTime)); } - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); + else { + const obj = {}; + for (const prop of Object.keys(fields)) { + obj[prop] = this._serializer.decodeValue(fields[prop]); + } + return obj; } - } } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); + /** + * Retrieves the field specified by `field`. + * + * @param {string|FieldPath} field The field path + * (e.g. 'foo' or 'foo.bar') to a specific field. + * @returns {*} The data at the specified field location or undefined if no + * such field exists. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.set({ a: { b: 'c' }}).then(() => { + * return documentRef.get(); + * }).then(documentSnapshot => { + * let field = documentSnapshot.get('a.b'); + * console.log(`Retrieved field value: ${field}`); + * }); + * ``` + */ + // We deliberately use `any` in the external API to not impose type-checking + // on end users. 
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any + get(field) { + (0, path_1.validateFieldPath)('field', field); + const protoField = this.protoField(field); + if (protoField === undefined) { + return undefined; + } + return this._serializer.decodeValue(protoField); } - } - return result; -} -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - template = template.replace( - /\{([^\{\}]+)\}|([^\{\}]+)/g, - function(_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); + /** + * Retrieves the field specified by 'fieldPath' in its Protobuf JS + * representation. + * + * @private + * @internal + * @param field The path (e.g. 'foo' or 'foo.bar') to a specific field. + * @returns The Protobuf-encoded data at the specified field location or + * undefined if no such field exists. + */ + protoField(field) { + let fields = this._fieldsProto; + if (fields === undefined) { + return undefined; } - expression.split(/,/g).forEach(function(variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - if (operator && operator !== "+") { - var separator = ","; - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - return (values.length !== 0 ? operator : "") + values.join(separator); - } else { - return values.join(","); + const components = path_1.FieldPath.fromArgument(field).toArray(); + while (components.length > 1) { + fields = fields[components.shift()]; + if (!fields || !fields.mapValue) { + return undefined; + } + fields = fields.mapValue.fields; } - } else { - return encodeReserved(literal); - } + return fields[components[0]]; + } + /** + * Convert a document snapshot to the Firestore 'Write' proto. + * + * @private + * @internal + */ + toWriteProto() { + return { + update: { + name: this._ref.formattedName, + fields: this._fieldsProto, + }, + }; + } + /** + * Convert a document snapshot to the Firestore 'Document' proto. + * + * @private + * @internal + */ + toDocumentProto() { + var _a, _b; + return { + name: this._ref.formattedName, + createTime: (_a = this.createTime) === null || _a === void 0 ? void 0 : _a.toProto().timestampValue, + updateTime: (_b = this.updateTime) === null || _b === void 0 ? void 0 : _b.toProto().timestampValue, + fields: this._fieldsProto, + }; + } + /** + * Returns true if the document's data and path in this `DocumentSnapshot` is + * equal to the provided value. + * + * @param {*} other The value to compare against. + * @return {boolean} true if this `DocumentSnapshot` is equal to the provided + * value. + */ + isEqual(other) { + // Since the read time is different on every document read, we explicitly + // ignore all document metadata in this comparison. 
+ return (this === other || + (other instanceof DocumentSnapshot && + this._ref.isEqual(other._ref) && + deepEqual(this._fieldsProto, other._fieldsProto))); } - ); - if (template === "/") { - return template; - } else { - return template.replace(/\/$/, ""); - } } - -// pkg/dist-src/parse.js -function parse(options) { - let method = options.method.toUpperCase(); - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, [ - "method", - "baseUrl", - "url", - "headers", - "request", - "mediaType" - ]); - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - if (!isBinaryRequest) { - if (options.mediaType.format) { - headers.accept = headers.accept.split(/,/).map( - (format) => format.replace( - /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, - `application/vnd$1$2.${options.mediaType.format}` - ) - ).join(","); +exports.DocumentSnapshot = DocumentSnapshot; +/** + * A QueryDocumentSnapshot contains data read from a document in your + * Firestore database as part of a query. The document is guaranteed to exist + * and its data can be extracted with [data()]{@link QueryDocumentSnapshot#data} + * or [get()]{@link DocumentSnapshot#get} to get a specific field. + * + * A QueryDocumentSnapshot offers the same API surface as a + * {@link DocumentSnapshot}. Since query results contain only existing + * documents, the [exists]{@link DocumentSnapshot#exists} property will + * always be true and [data()]{@link QueryDocumentSnapshot#data} will never + * return 'undefined'. + * + * @class QueryDocumentSnapshot + * @extends DocumentSnapshot + */ +class QueryDocumentSnapshot extends DocumentSnapshot { + /** + * The time the document was created. + * + * @type {Timestamp} + * @name QueryDocumentSnapshot#createTime + * @readonly + * @override + * + * @example + * ``` + * let query = firestore.collection('col'); + * + * query.get().forEach(snapshot => { + * console.log(`Document created at '${snapshot.createTime.toDate()}'`); + * }); + * ``` + */ + get createTime() { + return super.createTime; } - if (url.endsWith("/graphql")) { - if (options.mediaType.previews?.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } + /** + * The time the document was last updated (at the time the snapshot was + * generated). 
+ * + * @type {Timestamp} + * @name QueryDocumentSnapshot#updateTime + * @readonly + * @override + * + * @example + * ``` + * let query = firestore.collection('col'); + * + * query.get().forEach(snapshot => { + * console.log(`Document updated at '${snapshot.updateTime.toDate()}'`); + * }); + * ``` + */ + get updateTime() { + return super.updateTime; } - } - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } + /** + * Retrieves all fields in the document as an object. + * + * @override + * + * @returns {T} An object containing all fields in the document. + * + * @example + * ``` + * let query = firestore.collection('col'); + * + * query.get().forEach(documentSnapshot => { + * let data = documentSnapshot.data(); + * console.log(`Retrieved data: ${JSON.stringify(data)}`); + * }); + * ``` + */ + data() { + const data = super.data(); + if (!data) { + throw new Error('The data in a QueryDocumentSnapshot should always exist.'); + } + return data; } - } - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } - return Object.assign( - { method, url, headers }, - typeof body !== "undefined" ? { body } : null, - options.request ? { request: options.request } : null - ); } - -// pkg/dist-src/endpoint-with-defaults.js -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); +exports.QueryDocumentSnapshot = QueryDocumentSnapshot; +/** + * A Firestore Document Mask contains the field paths affected by an update. + * + * @class + * @private + * @internal + */ +class DocumentMask { + /** + * @private + * @internal + * @private + * + * @param fieldPaths The field paths in this mask. + */ + constructor(fieldPaths) { + this._sortedPaths = fieldPaths; + this._sortedPaths.sort((a, b) => a.compareTo(b)); + } + /** + * Creates a document mask with the field paths of a document. + * + * @private + * @internal + * @param data A map with fields to modify. Only the keys are used to extract + * the document mask. + */ + static fromUpdateMap(data) { + const fieldPaths = []; + data.forEach((value, key) => { + if (!(value instanceof field_value_1.FieldTransform) || value.includeInDocumentMask) { + fieldPaths.push(path_1.FieldPath.fromArgument(key)); + } + }); + return new DocumentMask(fieldPaths); + } + /** + * Creates a document mask from an array of field paths. + * + * @private + * @internal + * @param fieldMask A list of field paths. + */ + static fromFieldMask(fieldMask) { + const fieldPaths = []; + for (const fieldPath of fieldMask) { + fieldPaths.push(path_1.FieldPath.fromArgument(fieldPath)); + } + return new DocumentMask(fieldPaths); + } + /** + * Creates a document mask with the field names of a document. + * + * @private + * @internal + * @param data An object with fields to modify. Only the keys are used to + * extract the document mask. + */ + static fromObject(data) { + const fieldPaths = []; + function extractFieldPaths(currentData, currentPath) { + let isEmpty = true; + for (const key of Object.keys(currentData)) { + isEmpty = false; + // We don't split on dots since fromObject is called with + // DocumentData. 
+ const childSegment = new path_1.FieldPath(key); + const childPath = currentPath + ? currentPath.append(childSegment) + : childSegment; + const value = currentData[key]; + if (value instanceof field_value_1.FieldTransform) { + if (value.includeInDocumentMask) { + fieldPaths.push(childPath); + } + } + else if ((0, util_1.isPlainObject)(value)) { + extractFieldPaths(value, childPath); + } + else if (value !== undefined) { + // If the value is undefined it can never participate in the document + // mask. With `ignoreUndefinedProperties` set to false, + // `validateDocumentData` will reject an undefined value before even + // computing the document mask. + fieldPaths.push(childPath); + } + } + // Add a field path for an explicitly updated empty map. + if (currentPath && isEmpty) { + fieldPaths.push(currentPath); + } + } + extractFieldPaths(data); + return new DocumentMask(fieldPaths); + } + /** + * Returns true if this document mask contains no fields. + * + * @private + * @internal + * @return {boolean} Whether this document mask is empty. + */ + get isEmpty() { + return this._sortedPaths.length === 0; + } + /** + * Removes the specified values from a sorted field path array. + * + * @private + * @internal + * @param input A sorted array of FieldPaths. + * @param values An array of FieldPaths to remove. + */ + static removeFromSortedArray(input, values) { + for (let i = 0; i < input.length;) { + let removed = false; + for (const fieldPath of values) { + if (input[i].isEqual(fieldPath)) { + input.splice(i, 1); + removed = true; + break; + } + } + if (!removed) { + ++i; + } + } + } + /** + * Removes the field path specified in 'fieldPaths' from this document mask. + * + * @private + * @internal + * @param fieldPaths An array of FieldPaths. + */ + removeFields(fieldPaths) { + DocumentMask.removeFromSortedArray(this._sortedPaths, fieldPaths); + } + /** + * Returns whether this document mask contains 'fieldPath'. + * + * @private + * @internal + * @param fieldPath The field path to test. + * @return Whether this document mask contains 'fieldPath'. + */ + contains(fieldPath) { + for (const sortedPath of this._sortedPaths) { + const cmp = sortedPath.compareTo(fieldPath); + if (cmp === 0) { + return true; + } + else if (cmp > 0) { + return false; + } + } + return false; + } + /** + * Removes all properties from 'data' that are not contained in this document + * mask. + * + * @private + * @internal + * @param data An object to filter. + * @return A shallow copy of the object filtered by this document mask. + */ + applyTo(data) { + /*! + * Applies this DocumentMask to 'data' and computes the list of field paths + * that were specified in the mask but are not present in 'data'. + */ + const applyDocumentMask = data => { + const remainingPaths = this._sortedPaths.slice(0); + const processObject = (currentData, currentPath) => { + let result = null; + Object.keys(currentData).forEach(key => { + const childPath = currentPath + ? currentPath.append(key) + : new path_1.FieldPath(key); + if (this.contains(childPath)) { + DocumentMask.removeFromSortedArray(remainingPaths, [childPath]); + result = result || {}; + result[key] = currentData[key]; + } + else if ((0, util_1.isObject)(currentData[key])) { + const childObject = processObject(currentData[key], childPath); + if (childObject) { + result = result || {}; + result[key] = childObject; + } + } + }); + return result; + }; + // processObject() returns 'null' if the DocumentMask is empty. 
+ const filteredData = processObject(data) || {}; + return { + filteredData, + remainingPaths, + }; + }; + const result = applyDocumentMask(data); + if (result.remainingPaths.length !== 0) { + throw new Error(`Input data is missing for field "${result.remainingPaths[0]}".`); + } + return result.filteredData; + } + /** + * Converts a document mask to the Firestore 'DocumentMask' Proto. + * + * @private + * @internal + * @returns A Firestore 'DocumentMask' Proto. + */ + toProto() { + if (this.isEmpty) { + return {}; + } + const encodedPaths = []; + for (const fieldPath of this._sortedPaths) { + encodedPaths.push(fieldPath.formattedName); + } + return { + fieldPaths: encodedPaths, + }; + } } - -// pkg/dist-src/with-defaults.js -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS2 = merge(oldDefaults, newDefaults); - const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); - return Object.assign(endpoint2, { - DEFAULTS: DEFAULTS2, - defaults: withDefaults.bind(null, DEFAULTS2), - merge: merge.bind(null, DEFAULTS2), - parse - }); +exports.DocumentMask = DocumentMask; +/** + * A Firestore Document Transform. + * + * A DocumentTransform contains pending server-side transforms and their + * corresponding field paths. + * + * @private + * @internal + * @class + */ +class DocumentTransform { + /** + * @private + * @internal + * @private + * + * @param ref The DocumentReference for this transform. + * @param transforms A Map of FieldPaths to FieldTransforms. + */ + constructor(ref, transforms) { + this.ref = ref; + this.transforms = transforms; + } + /** + * Generates a DocumentTransform from a JavaScript object. + * + * @private + * @internal + * @param ref The `DocumentReference` to use for the DocumentTransform. + * @param obj The object to extract the transformations from. + * @returns The Document Transform. + */ + static fromObject(ref, obj) { + const updateMap = new Map(); + for (const prop of Object.keys(obj)) { + updateMap.set(new path_1.FieldPath(prop), obj[prop]); + } + return DocumentTransform.fromUpdateMap(ref, updateMap); + } + /** + * Generates a DocumentTransform from an Update Map. + * + * @private + * @internal + * @param ref The `DocumentReference` to use for the DocumentTransform. + * @param data The update data to extract the transformations from. + * @returns The Document Transform. + */ + static fromUpdateMap(ref, data) { + const transforms = new Map(); + function encode_(val, path, allowTransforms) { + if (val instanceof field_value_1.FieldTransform && val.includeInDocumentTransform) { + if (allowTransforms) { + transforms.set(path, val); + } + else { + throw new Error(`${val.methodName}() is not supported inside of array values.`); + } + } + else if (Array.isArray(val)) { + for (let i = 0; i < val.length; ++i) { + // We need to verify that no array value contains a document transform + encode_(val[i], path.append(String(i)), false); + } + } + else if ((0, util_1.isPlainObject)(val)) { + for (const prop of Object.keys(val)) { + encode_(val[prop], path.append(new path_1.FieldPath(prop)), allowTransforms); + } + } + } + data.forEach((value, key) => { + encode_(value, path_1.FieldPath.fromArgument(key), true); + }); + return new DocumentTransform(ref, transforms); + } + /** + * Whether this DocumentTransform contains any actionable transformations. + * + * @private + * @internal + */ + get isEmpty() { + return this.transforms.size === 0; + } + /** + * Returns the array of fields in this DocumentTransform. 
+ * + * @private + * @internal + */ + get fields() { + return Array.from(this.transforms.keys()); + } + /** + * Validates the user provided field values in this document transform. + * @private + * @internal + */ + validate() { + const allowUndefined = !!this.ref.firestore._settings.ignoreUndefinedProperties; + this.transforms.forEach(transform => transform.validate(allowUndefined)); + } + /** + * Converts a document transform to the Firestore 'FieldTransform' Proto. + * + * @private + * @internal + * @param serializer The Firestore serializer + * @returns A list of Firestore 'FieldTransform' Protos + */ + toProto(serializer) { + return Array.from(this.transforms, ([path, transform]) => transform.toProto(serializer, path)); + } } - -// pkg/dist-src/index.js -var endpoint = withDefaults(null, DEFAULTS); -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - +exports.DocumentTransform = DocumentTransform; +/** + * A Firestore Precondition encapsulates options for database writes. + * + * @private + * @internal + * @class + */ +class Precondition { + /** + * @private + * @internal + * @private + * + * @param options.exists - Whether the referenced document should exist in + * Firestore, + * @param options.lastUpdateTime - The last update time of the referenced + * document in Firestore. + * @param options + */ + constructor(options) { + if (options !== undefined) { + this._exists = options.exists; + this._lastUpdateTime = options.lastUpdateTime; + } + } + /** + * Generates the Protobuf `Preconditon` object for this precondition. + * + * @private + * @internal + * @returns The `Preconditon` Protobuf object or 'null' if there are no + * preconditions. + */ + toProto() { + if (this.isEmpty) { + return null; + } + const proto = {}; + if (this._lastUpdateTime !== undefined) { + proto.updateTime = this._lastUpdateTime.toProto().timestampValue; + } + else { + proto.exists = this._exists; + } + return proto; + } + /** + * Whether this DocumentTransform contains any enforcement. 
+ * + * @private + * @internal + */ + get isEmpty() { + return this._exists === undefined && !this._lastUpdateTime; + } +} +exports.Precondition = Precondition; +//# sourceMappingURL=document.js.map /***/ }), -/***/ 88467: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 16888: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - GraphqlResponseError: () => GraphqlResponseError, - graphql: () => graphql2, - withCustomRequest: () => withCustomRequest -}); -module.exports = __toCommonJS(dist_src_exports); -var import_request3 = __nccwpck_require__(36234); -var import_universal_user_agent = __nccwpck_require__(45030); - -// pkg/dist-src/version.js -var VERSION = "7.0.2"; - -// pkg/dist-src/with-defaults.js -var import_request2 = __nccwpck_require__(36234); - -// pkg/dist-src/graphql.js -var import_request = __nccwpck_require__(36234); - -// pkg/dist-src/error.js -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors: -` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +/*! + * Copyright 2018 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DeleteTransform = exports.FieldTransform = exports.FieldValue = exports.VectorValue = void 0; +const deepEqual = __nccwpck_require__(28206); +const serializer_1 = __nccwpck_require__(49170); +const util_1 = __nccwpck_require__(15468); +const validate_1 = __nccwpck_require__(33822); +/** + * Represent a vector type in Firestore documents. + * Create an instance with {@link FieldValue.vector}. + * + * @class VectorValue + */ +class VectorValue { + /** + * @private + * @internal + */ + constructor(values) { + // Making a copy of the parameter. + this._values = (values || []).map(n => n); + } + /** + * Returns a copy of the raw number array form of the vector. 
+ */ + toArray() { + return this._values.map(n => n); + } + /** + * @private + * @internal + */ + _toProto(serializer) { + return serializer.encodeVector(this._values); + } + /** + * @private + * @internal + */ + static _fromProto(valueArray) { + var _a, _b; + const values = (_b = (_a = valueArray.arrayValue) === null || _a === void 0 ? void 0 : _a.values) === null || _b === void 0 ? void 0 : _b.map(v => { + return v.doubleValue; + }); + return new VectorValue(values); + } + /** + * Returns `true` if the two VectorValue has the same raw number arrays, returns `false` otherwise. + */ + isEqual(other) { + return (0, util_1.isPrimitiveArrayEqual)(this._values, other._values); + } } -var GraphqlResponseError = class extends Error { - constructor(request2, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request2; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; - this.errors = response.errors; - this.data = response.data; - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); +exports.VectorValue = VectorValue; +/** + * Sentinel values that can be used when writing documents with set(), create() + * or update(). + * + * @class FieldValue + */ +class FieldValue { + /** @private */ + constructor() { } + /** + * Creates a new `VectorValue` constructed with a copy of the given array of numbers. + * + * @param values - Create a `VectorValue` instance with a copy of this array of numbers. + * + * @returns A new `VectorValue` constructed with a copy of the given array of numbers. + */ + static vector(values) { + return new VectorValue(values); } - } -}; - -// pkg/dist-src/graphql.js -var NON_VARIABLE_OPTIONS = [ - "method", - "baseUrl", - "url", - "headers", - "request", - "query", - "mediaType" -]; -var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request2, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject( - new Error(`[@octokit/graphql] "query" cannot be used as variable name`) - ); + /** + * Returns a sentinel for use with update() or set() with {merge:true} to mark + * a field for deletion. + * + * @returns {FieldValue} The sentinel value to use in your objects. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * let data = { a: 'b', c: 'd' }; + * + * documentRef.set(data).then(() => { + * return documentRef.update({a: Firestore.FieldValue.delete()}); + * }).then(() => { + * // Document now only contains { c: 'd' } + * }); + * ``` + */ + static delete() { + return DeleteTransform.DELETE_SENTINEL; } - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) - continue; - return Promise.reject( - new Error( - `[@octokit/graphql] "${key}" cannot be used as variable name` - ) - ); + /** + * Returns a sentinel used with set(), create() or update() to include a + * server-generated timestamp in the written data. + * + * @return {FieldValue} The FieldValue sentinel for use in a call to set(), + * create() or update(). 
+ * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.set({ + * time: Firestore.FieldValue.serverTimestamp() + * }).then(() => { + * return documentRef.get(); + * }).then(doc => { + * console.log(`Server time set to ${doc.get('time')}`); + * }); + * ``` + */ + static serverTimestamp() { + return ServerTimestampTransform.SERVER_TIMESTAMP_SENTINEL; } - } - const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; - const requestOptions = Object.keys( - parsedOptions - ).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; + /** + * Returns a special value that can be used with set(), create() or update() + * that tells the server to increment the the field's current value by the + * given value. + * + * If either current field value or the operand uses floating point + * precision, both values will be interpreted as floating point numbers and + * all arithmetic will follow IEEE 754 semantics. Otherwise, integer + * precision is kept and the result is capped between -2^63 and 2^63-1. + * + * If the current field value is not of type 'number', or if the field does + * not yet exist, the transformation will set the field to the given value. + * + * @param {number} n The value to increment by. + * @return {FieldValue} The FieldValue sentinel for use in a call to set(), + * create() or update(). + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.update( + * 'counter', Firestore.FieldValue.increment(1) + * ).then(() => { + * return documentRef.get(); + * }).then(doc => { + * // doc.get('counter') was incremented + * }); + * ``` + */ + static increment(n) { + // eslint-disable-next-line prefer-rest-params + (0, validate_1.validateMinNumberOfArguments)('FieldValue.increment', arguments, 1); + return new NumericIncrementTransform(n); } - if (!result.variables) { - result.variables = {}; + /** + * Returns a special value that can be used with set(), create() or update() + * that tells the server to union the given elements with any array value that + * already exists on the server. Each specified element that doesn't already + * exist in the array will be added to the end. If the field being modified is + * not already an array it will be overwritten with an array containing + * exactly the specified elements. + * + * @param {...*} elements The elements to union into the array. + * @return {FieldValue} The FieldValue sentinel for use in a call to set(), + * create() or update(). 
+ * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.update( + * 'array', Firestore.FieldValue.arrayUnion('foo') + * ).then(() => { + * return documentRef.get(); + * }).then(doc => { + * // doc.get('array') contains field 'foo' + * }); + * ``` + */ + static arrayUnion(...elements) { + (0, validate_1.validateMinNumberOfArguments)('FieldValue.arrayUnion', elements, 1); + return new ArrayUnionTransform(elements); } - result.variables[key] = parsedOptions[key]; - return result; - }, {}); - const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - return request2(requestOptions).then((response) => { - if (response.data.errors) { - const headers = {}; - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - throw new GraphqlResponseError( - requestOptions, - headers, - response.data - ); + /** + * Returns a special value that can be used with set(), create() or update() + * that tells the server to remove the given elements from any array value + * that already exists on the server. All instances of each element specified + * will be removed from the array. If the field being modified is not already + * an array it will be overwritten with an empty array. + * + * @param {...*} elements The elements to remove from the array. + * @return {FieldValue} The FieldValue sentinel for use in a call to set(), + * create() or update(). + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.update( + * 'array', Firestore.FieldValue.arrayRemove('foo') + * ).then(() => { + * return documentRef.get(); + * }).then(doc => { + * // doc.get('array') no longer contains field 'foo' + * }); + * ``` + */ + static arrayRemove(...elements) { + (0, validate_1.validateMinNumberOfArguments)('FieldValue.arrayRemove', elements, 1); + return new ArrayRemoveTransform(elements); + } + /** + * Returns true if this `FieldValue` is equal to the provided value. + * + * @param {*} other The value to compare against. + * @return {boolean} true if this `FieldValue` is equal to the provided value. + * + * @example + * ``` + * let fieldValues = [ + * Firestore.FieldValue.increment(-1.0), + * Firestore.FieldValue.increment(-1), + * Firestore.FieldValue.increment(-0.0), + * Firestore.FieldValue.increment(-0), + * Firestore.FieldValue.increment(0), + * Firestore.FieldValue.increment(0.0), + * Firestore.FieldValue.increment(1), + * Firestore.FieldValue.increment(1.0) + * ]; + * + * let equal = 0; + * for (let i = 0; i < fieldValues.length; ++i) { + * for (let j = i + 1; j < fieldValues.length; ++j) { + * if (fieldValues[i].isEqual(fieldValues[j])) { + * ++equal; + * } + * } + * } + * console.log(`Found ${equal} equalities.`); + * ``` + */ + isEqual(other) { + return this === other; } - return response.data.data; - }); } - -// pkg/dist-src/with-defaults.js -function withDefaults(request2, newDefaults) { - const newRequest = request2.defaults(newDefaults); - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: newRequest.endpoint - }); +exports.FieldValue = FieldValue; +/** + * An internal interface shared by all field transforms. 
+ * + * A 'FieldTransform` subclass should implement '.includeInDocumentMask', + * '.includeInDocumentTransform' and 'toProto' (if '.includeInDocumentTransform' + * is 'true'). + * + * @private + * @internal + * @abstract + */ +class FieldTransform extends FieldValue { } - -// pkg/dist-src/index.js -var graphql2 = withDefaults(import_request3.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); +exports.FieldTransform = FieldTransform; +/** + * A transform that deletes a field from a Firestore document. + * + * @private + * @internal + */ +class DeleteTransform extends FieldTransform { + constructor() { + super(); + } + /** + * Deletes are included in document masks. + * @private + * @internal + */ + get includeInDocumentMask() { + return true; + } + /** + * Deletes are are omitted from document transforms. + * @private + * @internal + */ + get includeInDocumentTransform() { + return false; + } + get methodName() { + return 'FieldValue.delete'; + } + validate() { } + toProto() { + throw new Error('FieldValue.delete() should not be included in a FieldTransform'); + } } -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - +exports.DeleteTransform = DeleteTransform; +/** + * Sentinel value for a field delete. + * @private + * @internal + */ +DeleteTransform.DELETE_SENTINEL = new DeleteTransform(); +/** + * A transform that sets a field to the Firestore server time. + * + * @private + * @internal + */ +class ServerTimestampTransform extends FieldTransform { + constructor() { + super(); + } + /** + * Server timestamps are omitted from document masks. + * + * @private + * @internal + */ + get includeInDocumentMask() { + return false; + } + /** + * Server timestamps are included in document transforms. + * + * @private + * @internal + */ + get includeInDocumentTransform() { + return true; + } + get methodName() { + return 'FieldValue.serverTimestamp'; + } + validate() { } + toProto(serializer, fieldPath) { + return { + fieldPath: fieldPath.formattedName, + setToServerValue: 'REQUEST_TIME', + }; + } +} +/** + * Sentinel value for a server timestamp. + * + * @private + * @internal + */ +ServerTimestampTransform.SERVER_TIMESTAMP_SENTINEL = new ServerTimestampTransform(); +/** + * Increments a field value on the backend. + * + * @private + * @internal + */ +class NumericIncrementTransform extends FieldTransform { + constructor(operand) { + super(); + this.operand = operand; + } + /** + * Numeric transforms are omitted from document masks. + * + * @private + * @internal + */ + get includeInDocumentMask() { + return false; + } + /** + * Numeric transforms are included in document transforms. + * + * @private + * @internal + */ + get includeInDocumentTransform() { + return true; + } + get methodName() { + return 'FieldValue.increment'; + } + validate() { + (0, validate_1.validateNumber)('FieldValue.increment()', this.operand); + } + toProto(serializer, fieldPath) { + const encodedOperand = serializer.encodeValue(this.operand); + return { fieldPath: fieldPath.formattedName, increment: encodedOperand }; + } + isEqual(other) { + return (this === other || + (other instanceof NumericIncrementTransform && + this.operand === other.operand)); + } +} +/** + * Transforms an array value via a union operation. 
+ * + * @private + * @internal + */ +class ArrayUnionTransform extends FieldTransform { + constructor(elements) { + super(); + this.elements = elements; + } + /** + * Array transforms are omitted from document masks. + * @private + * @internal + */ + get includeInDocumentMask() { + return false; + } + /** + * Array transforms are included in document transforms. + * @private + * @internal + */ + get includeInDocumentTransform() { + return true; + } + get methodName() { + return 'FieldValue.arrayUnion'; + } + validate(allowUndefined) { + for (let i = 0; i < this.elements.length; ++i) { + validateArrayElement(i, this.elements[i], allowUndefined); + } + } + toProto(serializer, fieldPath) { + const encodedElements = serializer.encodeValue(this.elements).arrayValue; + return { + fieldPath: fieldPath.formattedName, + appendMissingElements: encodedElements, + }; + } + isEqual(other) { + return (this === other || + (other instanceof ArrayUnionTransform && + deepEqual(this.elements, other.elements))); + } +} +/** + * Transforms an array value via a remove operation. + * + * @private + * @internal + */ +class ArrayRemoveTransform extends FieldTransform { + constructor(elements) { + super(); + this.elements = elements; + } + /** + * Array transforms are omitted from document masks. + * @private + * @internal + */ + get includeInDocumentMask() { + return false; + } + /** + * Array transforms are included in document transforms. + * @private + * @internal + */ + get includeInDocumentTransform() { + return true; + } + get methodName() { + return 'FieldValue.arrayRemove'; + } + validate(allowUndefined) { + for (let i = 0; i < this.elements.length; ++i) { + validateArrayElement(i, this.elements[i], allowUndefined); + } + } + toProto(serializer, fieldPath) { + const encodedElements = serializer.encodeValue(this.elements).arrayValue; + return { + fieldPath: fieldPath.formattedName, + removeAllFromArray: encodedElements, + }; + } + isEqual(other) { + return (this === other || + (other instanceof ArrayRemoveTransform && + deepEqual(this.elements, other.elements))); + } +} +/** + * Validates that `value` can be used as an element inside of an array. Certain + * field values (such as ServerTimestamps) are rejected. Nested arrays are also + * rejected. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The value to validate. + * @param allowUndefined Whether to allow nested properties that are `undefined`. 
+ */ +function validateArrayElement(arg, value, allowUndefined) { + if (Array.isArray(value)) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'array element')} Nested arrays are not supported.`); + } + (0, serializer_1.validateUserInput)(arg, value, 'array element', + /*path=*/ { allowDeletes: 'none', allowTransforms: false, allowUndefined }, + /*path=*/ undefined, + /*level=*/ 0, + /*inArray=*/ true); +} +//# sourceMappingURL=field-value.js.map /***/ }), -/***/ 64193: -/***/ ((module) => { +/***/ 47864: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - composePaginateRest: () => composePaginateRest, - isPaginatingEndpoint: () => isPaginatingEndpoint, - paginateRest: () => paginateRest, - paginatingEndpoints: () => paginatingEndpoints -}); -module.exports = __toCommonJS(dist_src_exports); +/*! + * Copyright 2023 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CompositeFilter = exports.UnaryFilter = exports.Filter = void 0; +/** + * A `Filter` represents a restriction on one or more field values and can + * be used to refine the results of a {@link Query}. + * `Filters`s are created by invoking {@link Filter#where}, {@link Filter#or}, + * or {@link Filter#and} and can then be passed to {@link Query#where} + * to create a new {@link Query} instance that also contains this `Filter`. + */ +class Filter { + /** + * Creates and returns a new [Filter]{@link Filter}, which can be + * applied to [Query.where()]{@link Query#where}, [Filter.or()]{@link Filter#or}, + * or [Filter.and()]{@link Filter#and}. When applied to a [Query]{@link Query} + * it requires that documents must contain the specified field and that its value should + * satisfy the relation constraint provided. + * + * @param {string|FieldPath} fieldPath The name of a property value to compare. + * @param {string} opStr A comparison operation in the form of a string. + * Acceptable operator strings are "<", "<=", "==", "!=", ">=", ">", "array-contains", + * "in", "not-in", and "array-contains-any". 
+ * @param {*} value The value to which to compare the field for inclusion in + * a query. + * @returns {Filter} The created Filter. + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * + * collectionRef.where(Filter.where('foo', '==', 'bar')).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + static where(fieldPath, opStr, value) { + return new UnaryFilter(fieldPath, opStr, value); + } + /** + * Creates and returns a new [Filter]{@link Filter} that is a + * disjunction of the given {@link Filter}s. A disjunction filter includes + * a document if it satisfies any of the given {@link Filter}s. + * + * The returned Filter can be applied to [Query.where()]{@link Query#where}, + * [Filter.or()]{@link Filter#or}, or [Filter.and()]{@link Filter#and}. When + * applied to a [Query]{@link Query} it requires that documents must satisfy + * one of the provided {@link Filter}s. + * + * @param {...Filter} filters Optional. The {@link Filter}s + * for OR operation. These must be created with calls to {@link Filter#where}, + * {@link Filter#or}, or {@link Filter#and}. + * @returns {Filter} The created {@link Filter}. + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * + * // doc.foo == 'bar' || doc.baz > 0 + * let orFilter = Filter.or(Filter.where('foo', '==', 'bar'), Filter.where('baz', '>', 0)); + * + * collectionRef.where(orFilter).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + static or(...filters) { + return new CompositeFilter(filters, 'OR'); + } + /** + * Creates and returns a new [Filter]{@link Filter} that is a + * conjunction of the given {@link Filter}s. A conjunction filter includes + * a document if it satisfies all of the given {@link Filter}s. + * + * The returned Filter can be applied to [Query.where()]{@link Query#where}, + * [Filter.or()]{@link Filter#or}, or [Filter.and()]{@link Filter#and}. When + * applied to a [Query]{@link Query} it requires that documents must satisfy + * one of the provided {@link Filter}s. + * + * @param {...Filter} filters Optional. The {@link Filter}s + * for AND operation. These must be created with calls to {@link Filter#where}, + * {@link Filter#or}, or {@link Filter#and}. + * @returns {Filter} The created {@link Filter}. + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * + * // doc.foo == 'bar' && doc.baz > 0 + * let andFilter = Filter.and(Filter.where('foo', '==', 'bar'), Filter.where('baz', '>', 0)); + * + * collectionRef.where(andFilter).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + static and(...filters) { + return new CompositeFilter(filters, 'AND'); + } +} +exports.Filter = Filter; +/** + * A `UnaryFilter` represents a restriction on one field value and can + * be used to refine the results of a {@link Query}. + * `UnaryFilter`s are created by invoking {@link Filter#where} and can then + * be passed to {@link Query#where} to create a new {@link Query} instance + * that also contains this `UnaryFilter`. 
+ * + * @private + * @internal + */ +class UnaryFilter extends Filter { + /** + @private + @internal + */ + constructor(field, operator, value) { + super(); + this.field = field; + this.operator = operator; + this.value = value; + } + /** + @private + @internal + */ + _getField() { + return this.field; + } + /** + @private + @internal + */ + _getOperator() { + return this.operator; + } + /** + @private + @internal + */ + _getValue() { + return this.value; + } +} +exports.UnaryFilter = UnaryFilter; +/** + * A `CompositeFilter` is used to narrow the set of documents returned + * by a Firestore query by performing the logical OR or AND of multiple + * {@link Filters}s. `CompositeFilters`s are created by invoking {@link Filter#or} + * or {@link Filter#and} and can then be passed to {@link Query#where} + * to create a new query instance that also contains the `CompositeFilter`. + * + * @private + * @internal + */ +class CompositeFilter extends Filter { + /** + @private + @internal + */ + constructor(filters, operator) { + super(); + this.filters = filters; + this.operator = operator; + } + /** + @private + @internal + */ + _getFilters() { + return this.filters; + } + /** + @private + @internal + */ + _getOperator() { + return this.operator; + } +} +exports.CompositeFilter = CompositeFilter; +//# sourceMappingURL=filter.js.map -// pkg/dist-src/version.js -var VERSION = "9.1.5"; +/***/ }), -// pkg/dist-src/normalize-paginated-list-response.js -function normalizePaginatedListResponse(response) { - if (!response.data) { - return { - ...response, - data: [] - }; - } - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) - return response; - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - response.data.total_count = totalCount; - return response; -} +/***/ 98854: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// pkg/dist-src/iterator.js -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? 
route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) - return { done: true }; - try { - const response = await requestMethod({ method, url, headers }); - const normalizedResponse = normalizePaginatedListResponse(response); - url = ((normalizedResponse.headers.link || "").match( - /<([^>]+)>;\s*rel="next"/ - ) || [])[1]; - return { value: normalizedResponse }; - } catch (error) { - if (error.status !== 409) - throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - }) - }; -} +"use strict"; -// pkg/dist-src/paginate.js -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = void 0; - } - return gather( - octokit, - [], - iterator(octokit, route, parameters)[Symbol.asyncIterator](), - mapFn - ); -} -function gather(octokit, results, iterator2, mapFn) { - return iterator2.next().then((result) => { - if (result.done) { - return results; +/*! + * Copyright 2018 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GeoPoint = void 0; +const validate_1 = __nccwpck_require__(33822); +/** + * An immutable object representing a geographic location in Firestore. The + * location is represented as a latitude/longitude pair. + * + * @class + */ +class GeoPoint { + /** + * Creates a [GeoPoint]{@link GeoPoint}. + * + * @param {number} latitude The latitude as a number between -90 and 90. + * @param {number} longitude The longitude as a number between -180 and 180. + * + * @example + * ``` + * let data = { + * google: new Firestore.GeoPoint(37.422, 122.084) + * }; + * + * firestore.doc('col/doc').set(data).then(() => { + * console.log(`Location is ${data.google.latitude}, ` + + * `${data.google.longitude}`); + * }); + * ``` + */ + constructor(latitude, longitude) { + (0, validate_1.validateNumber)('latitude', latitude, { minValue: -90, maxValue: 90 }); + (0, validate_1.validateNumber)('longitude', longitude, { minValue: -180, maxValue: 180 }); + this._latitude = latitude; + this._longitude = longitude; } - let earlyExit = false; - function done() { - earlyExit = true; + /** + * The latitude as a number between -90 and 90. + * + * @type {number} + * @name GeoPoint#latitude + * @readonly + */ + get latitude() { + return this._latitude; } - results = results.concat( - mapFn ? mapFn(result.value, done) : result.value.data - ); - if (earlyExit) { - return results; + /** + * The longitude as a number between -180 and 180. + * + * @type {number} + * @name GeoPoint#longitude + * @readonly + */ + get longitude() { + return this._longitude; + } + /** + * Returns true if this `GeoPoint` is equal to the provided value. + * + * @param {*} other The value to compare against. 
+ * @return {boolean} true if this `GeoPoint` is equal to the provided value. + */ + isEqual(other) { + return (this === other || + (other instanceof GeoPoint && + this.latitude === other.latitude && + this.longitude === other.longitude)); + } + /** + * Converts the GeoPoint to a google.type.LatLng proto. + * @private + * @internal + */ + toProto() { + return { + geoPointValue: { + latitude: this.latitude, + longitude: this.longitude, + }, + }; + } + /** + * Converts a google.type.LatLng proto to its GeoPoint representation. + * @private + * @internal + */ + static fromProto(proto) { + return new GeoPoint(proto.latitude || 0, proto.longitude || 0); } - return gather(octokit, results, iterator2, mapFn); - }); } +exports.GeoPoint = GeoPoint; +//# sourceMappingURL=geo-point.js.map -// pkg/dist-src/compose-paginate.js -var composePaginateRest = Object.assign(paginate, { - iterator -}); +/***/ }), -// pkg/dist-src/generated/paginating-endpoints.js -var paginatingEndpoints = [ - "GET /advisories", - "GET /app/hook/deliveries", - "GET /app/installation-requests", - "GET /app/installations", - "GET /assignments/{assignment_id}/accepted_assignments", - "GET /classrooms", - "GET /classrooms/{classroom_id}/assignments", - "GET /enterprises/{enterprise}/dependabot/alerts", - "GET /enterprises/{enterprise}/secret-scanning/alerts", - "GET /events", - "GET /gists", - "GET /gists/public", - "GET /gists/starred", - "GET /gists/{gist_id}/comments", - "GET /gists/{gist_id}/commits", - "GET /gists/{gist_id}/forks", - "GET /installation/repositories", - "GET /issues", - "GET /licenses", - "GET /marketplace_listing/plans", - "GET /marketplace_listing/plans/{plan_id}/accounts", - "GET /marketplace_listing/stubbed/plans", - "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", - "GET /networks/{owner}/{repo}/events", - "GET /notifications", - "GET /organizations", - "GET /orgs/{org}/actions/cache/usage-by-repository", - "GET /orgs/{org}/actions/permissions/repositories", - "GET /orgs/{org}/actions/runners", - "GET /orgs/{org}/actions/secrets", - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", - "GET /orgs/{org}/actions/variables", - "GET /orgs/{org}/actions/variables/{name}/repositories", - "GET /orgs/{org}/blocks", - "GET /orgs/{org}/code-scanning/alerts", - "GET /orgs/{org}/codespaces", - "GET /orgs/{org}/codespaces/secrets", - "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", - "GET /orgs/{org}/copilot/billing/seats", - "GET /orgs/{org}/dependabot/alerts", - "GET /orgs/{org}/dependabot/secrets", - "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", - "GET /orgs/{org}/events", - "GET /orgs/{org}/failed_invitations", - "GET /orgs/{org}/hooks", - "GET /orgs/{org}/hooks/{hook_id}/deliveries", - "GET /orgs/{org}/installations", - "GET /orgs/{org}/invitations", - "GET /orgs/{org}/invitations/{invitation_id}/teams", - "GET /orgs/{org}/issues", - "GET /orgs/{org}/members", - "GET /orgs/{org}/members/{username}/codespaces", - "GET /orgs/{org}/migrations", - "GET /orgs/{org}/migrations/{migration_id}/repositories", - "GET /orgs/{org}/outside_collaborators", - "GET /orgs/{org}/packages", - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - "GET /orgs/{org}/personal-access-token-requests", - "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", - "GET /orgs/{org}/personal-access-tokens", - "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", - "GET /orgs/{org}/projects", - "GET /orgs/{org}/properties/values", - "GET 
/orgs/{org}/public_members", - "GET /orgs/{org}/repos", - "GET /orgs/{org}/rulesets", - "GET /orgs/{org}/rulesets/rule-suites", - "GET /orgs/{org}/secret-scanning/alerts", - "GET /orgs/{org}/security-advisories", - "GET /orgs/{org}/teams", - "GET /orgs/{org}/teams/{team_slug}/discussions", - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", - "GET /orgs/{org}/teams/{team_slug}/invitations", - "GET /orgs/{org}/teams/{team_slug}/members", - "GET /orgs/{org}/teams/{team_slug}/projects", - "GET /orgs/{org}/teams/{team_slug}/repos", - "GET /orgs/{org}/teams/{team_slug}/teams", - "GET /projects/columns/{column_id}/cards", - "GET /projects/{project_id}/collaborators", - "GET /projects/{project_id}/columns", - "GET /repos/{owner}/{repo}/actions/artifacts", - "GET /repos/{owner}/{repo}/actions/caches", - "GET /repos/{owner}/{repo}/actions/organization-secrets", - "GET /repos/{owner}/{repo}/actions/organization-variables", - "GET /repos/{owner}/{repo}/actions/runners", - "GET /repos/{owner}/{repo}/actions/runs", - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", - "GET /repos/{owner}/{repo}/actions/secrets", - "GET /repos/{owner}/{repo}/actions/variables", - "GET /repos/{owner}/{repo}/actions/workflows", - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", - "GET /repos/{owner}/{repo}/activity", - "GET /repos/{owner}/{repo}/assignees", - "GET /repos/{owner}/{repo}/branches", - "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", - "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", - "GET /repos/{owner}/{repo}/code-scanning/alerts", - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", - "GET /repos/{owner}/{repo}/code-scanning/analyses", - "GET /repos/{owner}/{repo}/codespaces", - "GET /repos/{owner}/{repo}/codespaces/devcontainers", - "GET /repos/{owner}/{repo}/codespaces/secrets", - "GET /repos/{owner}/{repo}/collaborators", - "GET /repos/{owner}/{repo}/comments", - "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", - "GET /repos/{owner}/{repo}/commits", - "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", - "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", - "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", - "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", - "GET /repos/{owner}/{repo}/commits/{ref}/status", - "GET /repos/{owner}/{repo}/commits/{ref}/statuses", - "GET /repos/{owner}/{repo}/contributors", - "GET /repos/{owner}/{repo}/dependabot/alerts", - "GET /repos/{owner}/{repo}/dependabot/secrets", - "GET /repos/{owner}/{repo}/deployments", - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", - "GET /repos/{owner}/{repo}/environments", - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", - "GET /repos/{owner}/{repo}/events", - "GET /repos/{owner}/{repo}/forks", - "GET /repos/{owner}/{repo}/hooks", - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", - "GET /repos/{owner}/{repo}/invitations", - "GET /repos/{owner}/{repo}/issues", - "GET /repos/{owner}/{repo}/issues/comments", - 
"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", - "GET /repos/{owner}/{repo}/issues/events", - "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", - "GET /repos/{owner}/{repo}/issues/{issue_number}/events", - "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", - "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", - "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", - "GET /repos/{owner}/{repo}/keys", - "GET /repos/{owner}/{repo}/labels", - "GET /repos/{owner}/{repo}/milestones", - "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", - "GET /repos/{owner}/{repo}/notifications", - "GET /repos/{owner}/{repo}/pages/builds", - "GET /repos/{owner}/{repo}/projects", - "GET /repos/{owner}/{repo}/pulls", - "GET /repos/{owner}/{repo}/pulls/comments", - "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", - "GET /repos/{owner}/{repo}/releases", - "GET /repos/{owner}/{repo}/releases/{release_id}/assets", - "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", - "GET /repos/{owner}/{repo}/rules/branches/{branch}", - "GET /repos/{owner}/{repo}/rulesets", - "GET /repos/{owner}/{repo}/rulesets/rule-suites", - "GET /repos/{owner}/{repo}/secret-scanning/alerts", - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", - "GET /repos/{owner}/{repo}/security-advisories", - "GET /repos/{owner}/{repo}/stargazers", - "GET /repos/{owner}/{repo}/subscribers", - "GET /repos/{owner}/{repo}/tags", - "GET /repos/{owner}/{repo}/teams", - "GET /repos/{owner}/{repo}/topics", - "GET /repositories", - "GET /repositories/{repository_id}/environments/{environment_name}/secrets", - "GET /repositories/{repository_id}/environments/{environment_name}/variables", - "GET /search/code", - "GET /search/commits", - "GET /search/issues", - "GET /search/labels", - "GET /search/repositories", - "GET /search/topics", - "GET /search/users", - "GET /teams/{team_id}/discussions", - "GET /teams/{team_id}/discussions/{discussion_number}/comments", - "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", - "GET /teams/{team_id}/discussions/{discussion_number}/reactions", - "GET /teams/{team_id}/invitations", - "GET /teams/{team_id}/members", - "GET /teams/{team_id}/projects", - "GET /teams/{team_id}/repos", - "GET /teams/{team_id}/teams", - "GET /user/blocks", - "GET /user/codespaces", - "GET /user/codespaces/secrets", - "GET /user/emails", - "GET /user/followers", - "GET /user/following", - "GET /user/gpg_keys", - "GET /user/installations", - "GET /user/installations/{installation_id}/repositories", - "GET /user/issues", - "GET /user/keys", - "GET /user/marketplace_purchases", - "GET /user/marketplace_purchases/stubbed", - "GET /user/memberships/orgs", - "GET /user/migrations", - "GET /user/migrations/{migration_id}/repositories", - "GET /user/orgs", - "GET /user/packages", - "GET /user/packages/{package_type}/{package_name}/versions", - "GET /user/public_emails", - "GET /user/repos", - "GET /user/repository_invitations", - "GET /user/social_accounts", - "GET /user/ssh_signing_keys", - "GET /user/starred", - "GET /user/subscriptions", - "GET /user/teams", - "GET /users", - "GET 
/users/{username}/events", - "GET /users/{username}/events/orgs/{org}", - "GET /users/{username}/events/public", - "GET /users/{username}/followers", - "GET /users/{username}/following", - "GET /users/{username}/gists", - "GET /users/{username}/gpg_keys", - "GET /users/{username}/keys", - "GET /users/{username}/orgs", - "GET /users/{username}/packages", - "GET /users/{username}/projects", - "GET /users/{username}/received_events", - "GET /users/{username}/received_events/public", - "GET /users/{username}/repos", - "GET /users/{username}/social_accounts", - "GET /users/{username}/ssh_signing_keys", - "GET /users/{username}/starred", - "GET /users/{username}/subscriptions" -]; +/***/ 32210: +/***/ ((module, exports, __nccwpck_require__) => { -// pkg/dist-src/paginating-endpoints.js -function isPaginatingEndpoint(arg) { - if (typeof arg === "string") { - return paginatingEndpoints.includes(arg); - } else { - return false; - } +"use strict"; + +/*! + * Copyright 2017 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Firestore = exports.DEFAULT_MAX_TRANSACTION_ATTEMPTS = exports.MAX_REQUEST_RETRIES = exports.AggregateField = exports.Aggregate = exports.setLogFunction = exports.QueryPartition = exports.CollectionGroup = exports.GeoPoint = exports.FieldPath = exports.DocumentChange = exports.Timestamp = exports.Transaction = exports.WriteResult = exports.WriteBatch = exports.Filter = exports.VectorValue = exports.FieldValue = exports.QueryDocumentSnapshot = exports.DocumentSnapshot = exports.BulkWriter = exports.Query = exports.QuerySnapshot = exports.DocumentReference = exports.CollectionReference = void 0; +const stream_1 = __nccwpck_require__(12781); +const url_1 = __nccwpck_require__(57310); +const backoff_1 = __nccwpck_require__(63544); +const bulk_writer_1 = __nccwpck_require__(58427); +const bundle_1 = __nccwpck_require__(71550); +const convert_1 = __nccwpck_require__(36674); +const document_reader_1 = __nccwpck_require__(81080); +const document_1 = __nccwpck_require__(98912); +const logger_1 = __nccwpck_require__(42718); +const path_1 = __nccwpck_require__(34908); +const pool_1 = __nccwpck_require__(74197); +const collection_reference_1 = __nccwpck_require__(37253); +const document_reference_1 = __nccwpck_require__(502); +const serializer_1 = __nccwpck_require__(49170); +const timestamp_1 = __nccwpck_require__(29061); +const transaction_1 = __nccwpck_require__(95382); +const util_1 = __nccwpck_require__(15468); +const validate_1 = __nccwpck_require__(33822); +const write_batch_1 = __nccwpck_require__(76012); +const firestore_client_config_json_1 = __nccwpck_require__(89671); +const serviceConfig = firestore_client_config_json_1.interfaces['google.firestore.v1.Firestore']; +const collection_group_1 = __nccwpck_require__(85391); +Object.defineProperty(exports, "CollectionGroup", ({ enumerable: true, get: function () { return collection_group_1.CollectionGroup; } })); +const 
recursive_delete_1 = __nccwpck_require__(3023); +const trace_util_1 = __nccwpck_require__(2693); +const disabled_trace_util_1 = __nccwpck_require__(28083); +const enabled_trace_util_1 = __nccwpck_require__(25111); +var collection_reference_2 = __nccwpck_require__(37253); +Object.defineProperty(exports, "CollectionReference", ({ enumerable: true, get: function () { return collection_reference_2.CollectionReference; } })); +var document_reference_2 = __nccwpck_require__(502); +Object.defineProperty(exports, "DocumentReference", ({ enumerable: true, get: function () { return document_reference_2.DocumentReference; } })); +var query_snapshot_1 = __nccwpck_require__(81796); +Object.defineProperty(exports, "QuerySnapshot", ({ enumerable: true, get: function () { return query_snapshot_1.QuerySnapshot; } })); +var query_1 = __nccwpck_require__(38621); +Object.defineProperty(exports, "Query", ({ enumerable: true, get: function () { return query_1.Query; } })); +var bulk_writer_2 = __nccwpck_require__(58427); +Object.defineProperty(exports, "BulkWriter", ({ enumerable: true, get: function () { return bulk_writer_2.BulkWriter; } })); +var document_2 = __nccwpck_require__(98912); +Object.defineProperty(exports, "DocumentSnapshot", ({ enumerable: true, get: function () { return document_2.DocumentSnapshot; } })); +Object.defineProperty(exports, "QueryDocumentSnapshot", ({ enumerable: true, get: function () { return document_2.QueryDocumentSnapshot; } })); +var field_value_1 = __nccwpck_require__(16888); +Object.defineProperty(exports, "FieldValue", ({ enumerable: true, get: function () { return field_value_1.FieldValue; } })); +Object.defineProperty(exports, "VectorValue", ({ enumerable: true, get: function () { return field_value_1.VectorValue; } })); +var filter_1 = __nccwpck_require__(47864); +Object.defineProperty(exports, "Filter", ({ enumerable: true, get: function () { return filter_1.Filter; } })); +var write_batch_2 = __nccwpck_require__(76012); +Object.defineProperty(exports, "WriteBatch", ({ enumerable: true, get: function () { return write_batch_2.WriteBatch; } })); +Object.defineProperty(exports, "WriteResult", ({ enumerable: true, get: function () { return write_batch_2.WriteResult; } })); +var transaction_2 = __nccwpck_require__(95382); +Object.defineProperty(exports, "Transaction", ({ enumerable: true, get: function () { return transaction_2.Transaction; } })); +var timestamp_2 = __nccwpck_require__(29061); +Object.defineProperty(exports, "Timestamp", ({ enumerable: true, get: function () { return timestamp_2.Timestamp; } })); +var document_change_1 = __nccwpck_require__(62270); +Object.defineProperty(exports, "DocumentChange", ({ enumerable: true, get: function () { return document_change_1.DocumentChange; } })); +var path_2 = __nccwpck_require__(34908); +Object.defineProperty(exports, "FieldPath", ({ enumerable: true, get: function () { return path_2.FieldPath; } })); +var geo_point_1 = __nccwpck_require__(98854); +Object.defineProperty(exports, "GeoPoint", ({ enumerable: true, get: function () { return geo_point_1.GeoPoint; } })); +var query_partition_1 = __nccwpck_require__(88357); +Object.defineProperty(exports, "QueryPartition", ({ enumerable: true, get: function () { return query_partition_1.QueryPartition; } })); +var logger_2 = __nccwpck_require__(42718); +Object.defineProperty(exports, "setLogFunction", ({ enumerable: true, get: function () { return logger_2.setLogFunction; } })); +var aggregate_1 = __nccwpck_require__(97114); +Object.defineProperty(exports, "Aggregate", ({ 
enumerable: true, get: function () { return aggregate_1.Aggregate; } })); +Object.defineProperty(exports, "AggregateField", ({ enumerable: true, get: function () { return aggregate_1.AggregateField; } })); +const libVersion = (__nccwpck_require__(49830)/* .version */ .i8); +(0, logger_1.setLibVersion)(libVersion); +/*! + * DO NOT REMOVE THE FOLLOWING NAMESPACE DEFINITIONS + */ +/** + * @namespace google.protobuf + */ +/** + * @namespace google.rpc + */ +/** + * @namespace google.longrunning + */ +/** + * @namespace google.firestore.v1 + */ +/** + * @namespace google.firestore.v1beta1 + */ +/** + * @namespace google.firestore.admin.v1 + */ +/*! + * HTTP header for the resource prefix to improve routing and project isolation + * by the backend. + */ +const CLOUD_RESOURCE_HEADER = 'google-cloud-resource-prefix'; +/** + * The maximum number of times to retry idempotent requests. + * @private + */ +exports.MAX_REQUEST_RETRIES = 5; +/** + * The maximum number of times to attempt a transaction before failing. + * @private + */ +exports.DEFAULT_MAX_TRANSACTION_ATTEMPTS = 5; +/*! + * The default number of idle GRPC channel to keep. + */ +const DEFAULT_MAX_IDLE_CHANNELS = 1; +/*! + * The maximum number of concurrent requests supported by a single GRPC channel, + * as enforced by Google's Frontend. If the SDK issues more than 100 concurrent + * operations, we need to use more than one GAPIC client since these clients + * multiplex all requests over a single channel. + */ +const MAX_CONCURRENT_REQUESTS_PER_CLIENT = 100; +/** + * Document data (e.g. for use with + * [set()]{@link DocumentReference#set}) consisting of fields mapped + * to values. + * + * @typedef {Object.} DocumentData + */ +/** + * Converter used by [withConverter()]{@link Query#withConverter} to transform + * user objects of type `AppModelType` into Firestore data of type + * `DbModelType`. + * + * Using the converter allows you to specify generic type arguments when storing + * and retrieving objects from Firestore. + * + * @example + * ``` + * class Post { + * constructor(readonly title: string, readonly author: string) {} + * + * toString(): string { + * return this.title + ', by ' + this.author; + * } + * } + * + * const postConverter = { + * toFirestore(post: Post): FirebaseFirestore.DocumentData { + * return {title: post.title, author: post.author}; + * }, + * fromFirestore( + * snapshot: FirebaseFirestore.QueryDocumentSnapshot + * ): Post { + * const data = snapshot.data(); + * return new Post(data.title, data.author); + * } + * }; + * + * const postSnap = await Firestore() + * .collection('posts') + * .withConverter(postConverter) + * .doc().get(); + * const post = postSnap.data(); + * if (post !== undefined) { + * post.title; // string + * post.toString(); // Should be defined + * post.someNonExistentProperty; // TS error + * } + * + * ``` + * @property {Function} toFirestore Called by the Firestore SDK to convert a + * custom model object of type `AppModelType` into a plain Javascript object + * (suitable for writing directly to the Firestore database). + * @property {Function} fromFirestore Called by the Firestore SDK to convert + * Firestore data into an object of type `AppModelType`. + * @typedef {Object} FirestoreDataConverter + */ +/** + * Update data (for use with [update]{@link DocumentReference#update}) + * that contains paths mapped to values. Fields that contain dots + * reference nested fields within the document. + * + * You can update a top-level field in your document by using the field name + * as a key (e.g. 
`foo`). The provided value completely replaces the contents + * for this field. + * + * You can also update a nested field directly by using its field path as a key + * (e.g. `foo.bar`). This nested field update replaces the contents at `bar` + * but does not modify other data under `foo`. + * + * @example + * ``` + * const documentRef = firestore.doc('coll/doc'); + * documentRef.set({a1: {a2: 'val'}, b1: {b2: 'val'}, c1: {c2: 'val'}}); + * documentRef.update({ + * b1: {b3: 'val'}, + * 'c1.c3': 'val', + * }); + * // Value is {a1: {a2: 'val'}, b1: {b3: 'val'}, c1: {c2: 'val', c3: 'val'}} + * + * ``` + * @typedef {Object.} UpdateData + */ +/** + * An options object that configures conditional behavior of + * [update()]{@link DocumentReference#update} and + * [delete()]{@link DocumentReference#delete} calls in + * [DocumentReference]{@link DocumentReference}, + * [WriteBatch]{@link WriteBatch}, [BulkWriter]{@link BulkWriter}, and + * [Transaction]{@link Transaction}. Using Preconditions, these calls + * can be restricted to only apply to documents that match the specified + * conditions. + * + * @example + * ``` + * const documentRef = firestore.doc('coll/doc'); + * + * documentRef.get().then(snapshot => { + * const updateTime = snapshot.updateTime; + * + * console.log(`Deleting document at update time: ${updateTime.toDate()}`); + * return documentRef.delete({ lastUpdateTime: updateTime }); + * }); + * + * ``` + * @property {Timestamp} lastUpdateTime The update time to enforce. If set, + * enforces that the document was last updated at lastUpdateTime. Fails the + * operation if the document was last updated at a different time. + * @property {boolean} exists If set, enforces that the target document must + * or must not exist. + * @typedef {Object} Precondition + */ +/** + * An options object that configures the behavior of + * [set()]{@link DocumentReference#set} calls in + * [DocumentReference]{@link DocumentReference}, + * [WriteBatch]{@link WriteBatch}, and + * [Transaction]{@link Transaction}. These calls can be + * configured to perform granular merges instead of overwriting the target + * documents in their entirety by providing a SetOptions object with + * { merge : true }. + * + * @property {boolean} merge Changes the behavior of a set() call to only + * replace the values specified in its data argument. Fields omitted from the + * set() call remain untouched. + * @property {Array<(string|FieldPath)>} mergeFields Changes the behavior of + * set() calls to only replace the specified field paths. Any field path that is + * not specified is ignored and remains untouched. + * It is an error to pass a SetOptions object to a set() call that is missing a + * value for any of the fields specified here. + * @typedef {Object} SetOptions + */ +/** + * An options object that can be used to configure the behavior of + * [getAll()]{@link Firestore#getAll} calls. By providing a `fieldMask`, these + * calls can be configured to only return a subset of fields. + * + * @property {Array<(string|FieldPath)>} fieldMask Specifies the set of fields + * to return and reduces the amount of data transmitted by the backend. + * Adding a field mask does not filter results. Documents do not need to + * contain values for all the fields in the mask to be part of the result set. + * @typedef {Object} ReadOptions + */ +/** + * An options object to configure throttling on BulkWriter. + * + * Whether to disable or configure throttling. By default, throttling is + * enabled. 
`throttling` can be set to either a boolean or a config object. + * Setting it to `true` will use default values. You can override the defaults + * by setting it to `false` to disable throttling, or by setting the config + * values to enable throttling with the provided values. + * + * @property {boolean|Object} throttling Whether to disable or enable + * throttling. Throttling is enabled by default, if the field is set to `true` + * or if any custom throttling options are provided. `{ initialOpsPerSecond: + * number }` sets the initial maximum number of operations per second allowed by + * the throttler. If `initialOpsPerSecond` is not set, the default is 500 + * operations per second. `{ maxOpsPerSecond: number }` sets the maximum number + * of operations per second allowed by the throttler. If `maxOpsPerSecond` is + * not set, no maximum is enforced. + * @typedef {Object} BulkWriterOptions + */ +/** + * An error thrown when a BulkWriter operation fails. + * + * The error used by {@link BulkWriter~shouldRetryCallback} set in + * {@link BulkWriter#onWriteError}. + * + * @property {GrpcStatus} code The status code of the error. + * @property {string} message The error message of the error. + * @property {DocumentReference} documentRef The document reference the + * operation was performed on. + * @property {'create' | 'set' | 'update' | 'delete'} operationType The type + * of operation performed. + * @property {number} failedAttempts How many times this operation has been + * attempted unsuccessfully. + * @typedef {Error} BulkWriterError + */ +/** + * Status codes returned by GRPC operations. + * + * @see https://github.com/grpc/grpc/blob/master/doc/statuscodes.md + * + * @enum {number} + * @typedef {Object} GrpcStatus + */ +/** + * The Firestore client represents a Firestore Database and is the entry point + * for all Firestore operations. + * + * @see [Firestore Documentation]{@link https://firebase.google.com/docs/firestore/} + * + * @class + * + * @example Install the client library with npm: + * ``` + * npm install --save @google-cloud/firestore + * + * ``` + * @example Import the client library + * ``` + * var Firestore = require('@google-cloud/firestore'); + * + * ``` + * @example Create a client that uses Application Default Credentials (ADC): + * ``` + * var firestore = new Firestore(); + * + * ``` + * @example Create a client with explicit credentials: + * ``` + * var firestore = new Firestore({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * + * ``` + * @example include:samples/quickstart.js + * region_tag:firestore_quickstart + * Full quickstart example: + */ +class Firestore { + /** + * Lazy-load the Firestore's default BulkWriter. + * + * @private + * @internal + */ + getBulkWriter() { + if (!this._bulkWriter) { + this._bulkWriter = this.bulkWriter(); + } + return this._bulkWriter; + } + /** + * @param {Object=} settings [Configuration object](#/docs). + * @param {string=} settings.projectId The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check the + * environment variable GCLOUD_PROJECT for your project ID. Can be omitted in + * environments that support + * {@link https://cloud.google.com/docs/authentication Application Default + * Credentials} + * @param {string=} settings.keyFilename Local file containing the Service + * Account credentials as downloaded from the Google Developers Console. 
Can + * be omitted in environments that support + * {@link https://cloud.google.com/docs/authentication Application Default + * Credentials}. To configure Firestore with custom credentials, use + * `settings.credentials` and provide the `client_email` and `private_key` of + * your service account. + * @param {{client_email:string=, private_key:string=}=} settings.credentials + * The `client_email` and `private_key` properties of the service account + * to use with your Firestore project. Can be omitted in environments that + * support {@link https://cloud.google.com/docs/authentication Application + * Default Credentials}. If your credentials are stored in a JSON file, you + * can specify a `keyFilename` instead. + * @param {string=} settings.host The host to connect to. + * @param {boolean=} settings.ssl Whether to use SSL when connecting. + * @param {number=} settings.maxIdleChannels The maximum number of idle GRPC + * channels to keep. A smaller number of idle channels reduces memory usage + * but increases request latency for clients with fluctuating request rates. + * If set to 0, shuts down all GRPC channels when the client becomes idle. + * Defaults to 1. + * @param {boolean=} settings.ignoreUndefinedProperties Whether to skip nested + * properties that are set to `undefined` during object serialization. If set + * to `true`, these properties are skipped and not written to Firestore. If + * set `false` or omitted, the SDK throws an exception when it encounters + * properties of type `undefined`. + * @param {boolean=} settings.preferRest Whether to force the use of HTTP/1.1 REST + * transport until a method that requires gRPC is called. When a method requires gRPC, + * this Firestore client will load dependent gRPC libraries and then use gRPC transport + * for communication from that point forward. Currently the only operation + * that requires gRPC is creating a snapshot listener with the method + * `DocumentReference.onSnapshot()`, `CollectionReference.onSnapshot()`, or + * `Query.onSnapshot()`. If specified, this setting value will take precedent over the + * environment variable `FIRESTORE_PREFER_REST`. If not specified, the + * SDK will use the value specified in the environment variable `FIRESTORE_PREFER_REST`. + * Valid values of `FIRESTORE_PREFER_REST` are `true` ('1') or `false` (`0`). Values are + * not case-sensitive. Any other value for the environment variable will be ignored and + * a warning will be logged to the console. + */ + constructor(settings) { + /** + * The configuration options for the GAPIC client. + * @private + * @internal + */ + this._settings = {}; + /** + * Whether the initialization settings can still be changed by invoking + * `settings()`. + * @private + * @internal + */ + this._settingsFrozen = false; + /** + * The serializer to use for the Protobuf transformation. + * @private + * @internal + */ + this._serializer = null; + /** + * The project ID for this client. + * + * The project ID is auto-detected during the first request unless a project + * ID is passed to the constructor (or provided via `.settings()`). + * @private + * @internal + */ + this._projectId = undefined; + /** + * The database ID provided via `.settings()`. + * + * @private + * @internal + */ + this._databaseId = undefined; + /** + * Count of listeners that have been registered on the client. + * + * The client can only be terminated when there are no pending writes or + * registered listeners. 
+ * @private + * @internal + */ + this.registeredListenersCount = 0; + /** + * Number of pending operations on the client. + * + * The client can only be terminated when there are no pending writes or + * registered listeners. + * @private + * @internal + */ + this.bulkWritersCount = 0; + const libraryHeader = { + libName: 'gccl', + libVersion, + }; + if (settings && settings.firebaseVersion) { + libraryHeader.libVersion += ' fire/' + settings.firebaseVersion; + } + this.validateAndApplySettings({ ...settings, ...libraryHeader }); + this._traceUtil = this.newTraceUtilInstance(this._settings); + const retryConfig = serviceConfig.retry_params.default; + this._backoffSettings = { + initialDelayMs: retryConfig.initial_retry_delay_millis, + maxDelayMs: retryConfig.max_retry_delay_millis, + backoffFactor: retryConfig.retry_delay_multiplier, + }; + const maxIdleChannels = this._settings.maxIdleChannels === undefined + ? DEFAULT_MAX_IDLE_CHANNELS + : this._settings.maxIdleChannels; + this._clientPool = new pool_1.ClientPool(MAX_CONCURRENT_REQUESTS_PER_CLIENT, maxIdleChannels, + /* clientFactory= */ (requiresGrpc) => { + var _a; + let client; + // Use the rest fallback if enabled and if the method does not require GRPC + const useFallback = !this._settings.preferRest || requiresGrpc ? false : 'rest'; + let gax; + if (useFallback) { + if (!this._gaxFallback) { + gax = this._gaxFallback = __nccwpck_require__(90418); + } + else { + gax = this._gaxFallback; + } + } + else { + if (!this._gax) { + gax = this._gax = __nccwpck_require__(12263); + } + else { + gax = this._gax; + } + } + if (this._settings.ssl === false) { + const grpcModule = (_a = this._settings.grpc) !== null && _a !== void 0 ? _a : (__nccwpck_require__(12263).grpc); + const sslCreds = grpcModule.credentials.createInsecure(); + const settings = { + sslCreds, + ...this._settings, + fallback: useFallback, + }; + // Since `ssl === false`, if we're using the GAX fallback then + // also set the `protocol` option for GAX fallback to force http + if (useFallback) { + settings.protocol = 'http'; + } + client = new module.exports.v1(settings, gax); + } + else { + client = new module.exports.v1({ + ...this._settings, + fallback: useFallback, + }, gax); + } + (0, logger_1.logger)('clientFactory', null, 'Initialized Firestore GAPIC Client (useFallback: %s)', useFallback); + return client; + }, + /* clientDestructor= */ client => client.close()); + (0, logger_1.logger)('Firestore', null, 'Initialized Firestore'); + } + /** + * Specifies custom settings to be used to configure the `Firestore` + * instance. Can only be invoked once and before any other Firestore method. + * + * If settings are provided via both `settings()` and the `Firestore` + * constructor, both settings objects are merged and any settings provided via + * `settings()` take precedence. + * + * @param {object} settings The settings to use for all Firestore operations. + */ + settings(settings) { + (0, validate_1.validateObject)('settings', settings); + (0, validate_1.validateString)('settings.projectId', settings.projectId, { optional: true }); + (0, validate_1.validateString)('settings.databaseId', settings.databaseId, { + optional: true, + }); + if (this._settingsFrozen) { + throw new Error('Firestore has already been initialized. 
You can only call ' + + 'settings() once, and only before calling any other methods on a ' + + 'Firestore object.'); + } + const mergedSettings = { ...this._settings, ...settings }; + this.validateAndApplySettings(mergedSettings); + this._settingsFrozen = true; + } + validateAndApplySettings(settings) { + var _a; + if (settings.projectId !== undefined) { + (0, validate_1.validateString)('settings.projectId', settings.projectId); + this._projectId = settings.projectId; + } + if (settings.databaseId !== undefined) { + (0, validate_1.validateString)('settings.databaseId', settings.databaseId); + this._databaseId = settings.databaseId; + } + let url = null; + // If preferRest is not specified in settings, but is set as environment variable, + // then use the environment variable value. + const preferRestEnvValue = (0, util_1.tryGetPreferRestEnvironmentVariable)(); + if (settings.preferRest === undefined && preferRestEnvValue !== undefined) { + settings = { + ...settings, + preferRest: preferRestEnvValue, + }; + } + // If the environment variable is set, it should always take precedence + // over any user passed in settings. + if (process.env.FIRESTORE_EMULATOR_HOST) { + (0, validate_1.validateHost)('FIRESTORE_EMULATOR_HOST', process.env.FIRESTORE_EMULATOR_HOST); + settings = { + ...settings, + host: process.env.FIRESTORE_EMULATOR_HOST, + ssl: false, + }; + url = new url_1.URL(`http://${settings.host}`); + } + else if (settings.host !== undefined) { + (0, validate_1.validateHost)('settings.host', settings.host); + url = new url_1.URL(`http://${settings.host}`); + } + // Only store the host if a valid value was provided in `host`. + if (url !== null) { + if ((settings.servicePath !== undefined && + settings.servicePath !== url.hostname) || + (settings.apiEndpoint !== undefined && + settings.apiEndpoint !== url.hostname)) { + // eslint-disable-next-line no-console + console.warn(`The provided host (${url.hostname}) in "settings" does not ` + + `match the existing host (${(_a = settings.servicePath) !== null && _a !== void 0 ? _a : settings.apiEndpoint}). Using the provided host.`); + } + settings.servicePath = url.hostname; + if (url.port !== '' && settings.port === undefined) { + settings.port = Number(url.port); + } + // We need to remove the `host` and `apiEndpoint` setting, in case a user + // calls `settings()`, which will compare the the provided `host` to the + // existing hostname stored on `servicePath`. + delete settings.host; + delete settings.apiEndpoint; + } + if (settings.ssl !== undefined) { + (0, validate_1.validateBoolean)('settings.ssl', settings.ssl); + } + if (settings.maxIdleChannels !== undefined) { + (0, validate_1.validateInteger)('settings.maxIdleChannels', settings.maxIdleChannels, { + minValue: 0, + }); + } + this._settings = settings; + this._settings.toJSON = function () { + const temp = Object.assign({}, this); + if (temp.credentials) { + temp.credentials = { private_key: '***', client_email: '***' }; + } + return temp; + }; + this._serializer = new serializer_1.Serializer(this); + this._traceUtil = this.newTraceUtilInstance(this._settings); + } + newTraceUtilInstance(settings) { + var _a; + // Take the tracing option from the settings. + let createEnabledInstance = (_a = settings.openTelemetryOptions) === null || _a === void 0 ? void 0 : _a.enableTracing; + // The environment variable can override options to enable/disable telemetry collection. 
+ if ('FIRESTORE_ENABLE_TRACING' in process.env) { + const enableTracingEnvVar = process.env.FIRESTORE_ENABLE_TRACING.toLowerCase(); + if (enableTracingEnvVar === 'on' || enableTracingEnvVar === 'true') { + createEnabledInstance = true; + } + if (enableTracingEnvVar === 'off' || enableTracingEnvVar === 'false') { + createEnabledInstance = false; + } + } + if (createEnabledInstance) { + // Re-use the existing EnabledTraceUtil if one has been created. + if (this._traceUtil && this._traceUtil instanceof enabled_trace_util_1.EnabledTraceUtil) { + return this._traceUtil; + } + return new enabled_trace_util_1.EnabledTraceUtil(settings); + } + else { + return new disabled_trace_util_1.DisabledTraceUtil(); + } + } + /** + * Returns the Project ID for this Firestore instance. Validates that + * `initializeIfNeeded()` was called before. + * + * @private + * @internal + */ + get projectId() { + if (this._projectId === undefined) { + throw new Error('INTERNAL ERROR: Client is not yet ready to issue requests.'); + } + return this._projectId; + } + /** + * Returns the Database ID for this Firestore instance. + */ + get databaseId() { + return this._databaseId || path_1.DEFAULT_DATABASE_ID; + } + /** + * Returns the root path of the database. Validates that + * `initializeIfNeeded()` was called before. + * + * @private + * @internal + */ + get formattedName() { + return `projects/${this.projectId}/databases/${this.databaseId}`; + } + /** + * Gets a [DocumentReference]{@link DocumentReference} instance that + * refers to the document at the specified path. + * + * @param {string} documentPath A slash-separated path to a document. + * @returns {DocumentReference} The + * [DocumentReference]{@link DocumentReference} instance. + * + * @example + * ``` + * let documentRef = firestore.doc('collection/document'); + * console.log(`Path of document is ${documentRef.path}`); + * ``` + */ + doc(documentPath) { + (0, path_1.validateResourcePath)('documentPath', documentPath); + const path = path_1.ResourcePath.EMPTY.append(documentPath); + if (!path.isDocument) { + throw new Error(`Value for argument "documentPath" must point to a document, but was "${documentPath}". Your path does not contain an even number of components.`); + } + return new document_reference_1.DocumentReference(this, path); + } + /** + * Gets a [CollectionReference]{@link CollectionReference} instance + * that refers to the collection at the specified path. + * + * @param {string} collectionPath A slash-separated path to a collection. + * @returns {CollectionReference} The + * [CollectionReference]{@link CollectionReference} instance. + * + * @example + * ``` + * let collectionRef = firestore.collection('collection'); + * + * // Add a document with an auto-generated ID. + * collectionRef.add({foo: 'bar'}).then((documentRef) => { + * console.log(`Added document at ${documentRef.path})`); + * }); + * ``` + */ + collection(collectionPath) { + (0, path_1.validateResourcePath)('collectionPath', collectionPath); + const path = path_1.ResourcePath.EMPTY.append(collectionPath); + if (!path.isCollection) { + throw new Error(`Value for argument "collectionPath" must point to a collection, but was "${collectionPath}". Your path does not contain an odd number of components.`); + } + return new collection_reference_1.CollectionReference(this, path); + } + /** + * Creates and returns a new Query that includes all documents in the + * database that are contained in a collection or subcollection with the + * given collectionId. 
+ * + * @param {string} collectionId Identifies the collections to query over. + * Every collection or subcollection with this ID as the last segment of its + * path will be included. Cannot contain a slash. + * @returns {CollectionGroup} The created CollectionGroup. + * + * @example + * ``` + * let docA = firestore.doc('mygroup/docA').set({foo: 'bar'}); + * let docB = firestore.doc('abc/def/mygroup/docB').set({foo: 'bar'}); + * + * Promise.all([docA, docB]).then(() => { + * let query = firestore.collectionGroup('mygroup'); + * query = query.where('foo', '==', 'bar'); + * return query.get().then(snapshot => { + * console.log(`Found ${snapshot.size} documents.`); + * }); + * }); + * ``` + */ + collectionGroup(collectionId) { + if (collectionId.indexOf('/') !== -1) { + throw new Error(`Invalid collectionId '${collectionId}'. Collection IDs must not contain '/'.`); + } + return new collection_group_1.CollectionGroup(this, collectionId, /* converter= */ undefined); + } + /** + * Creates a [WriteBatch]{@link WriteBatch}, used for performing + * multiple writes as a single atomic operation. + * + * @returns {WriteBatch} A WriteBatch that operates on this Firestore + * client. + * + * @example + * ``` + * let writeBatch = firestore.batch(); + * + * // Add two documents in an atomic batch. + * let data = { foo: 'bar' }; + * writeBatch.set(firestore.doc('col/doc1'), data); + * writeBatch.set(firestore.doc('col/doc2'), data); + * + * writeBatch.commit().then(res => { + * console.log('Successfully executed batch.'); + * }); + * ``` + */ + batch() { + return new write_batch_1.WriteBatch(this); + } + /** + * Creates a [BulkWriter]{@link BulkWriter}, used for performing + * multiple writes in parallel. Gradually ramps up writes as specified + * by the 500/50/5 rule. + * + * If you pass [BulkWriterOptions]{@link BulkWriterOptions}, you can + * configure the throttling rates for the created BulkWriter. + * + * @see [500/50/5 Documentation]{@link https://firebase.google.com/docs/firestore/best-practices#ramping_up_traffic} + * + * @param {BulkWriterOptions=} options BulkWriter options. + * @returns {BulkWriter} A BulkWriter that operates on this Firestore + * client. + * + * @example + * ``` + * let bulkWriter = firestore.bulkWriter(); + * + * bulkWriter.create(firestore.doc('col/doc1'), {foo: 'bar'}) + * .then(res => { + * console.log(`Added document at ${res.writeTime}`); + * }); + * bulkWriter.update(firestore.doc('col/doc2'), {foo: 'bar'}) + * .then(res => { + * console.log(`Updated document at ${res.writeTime}`); + * }); + * bulkWriter.delete(firestore.doc('col/doc3')) + * .then(res => { + * console.log(`Deleted document at ${res.writeTime}`); + * }); + * await bulkWriter.close().then(() => { + * console.log('Executed all writes'); + * }); + * ``` + */ + bulkWriter(options) { + return new bulk_writer_1.BulkWriter(this, options); + } + /** @private */ + snapshot_(documentOrName, readTime, encoding) { + // TODO: Assert that Firestore Project ID is valid. + let convertTimestamp; + let convertFields; + if (encoding === undefined || encoding === 'protobufJS') { + convertTimestamp = data => data; + convertFields = data => data; + } + else if (encoding === 'json') { + // Google Cloud Functions calls us with Proto3 JSON format data, which we + // must convert to Protobuf JS. + convertTimestamp = convert_1.timestampFromJson; + convertFields = convert_1.fieldsFromJson; + } + else { + throw new Error('Unsupported encoding format. 
Expected "json" or "protobufJS", ' + + `but was "${encoding}".`); + } + let ref; + let document; + if (typeof documentOrName === 'string') { + ref = new document_reference_1.DocumentReference(this, path_1.QualifiedResourcePath.fromSlashSeparatedString(documentOrName)); + document = new document_1.DocumentSnapshotBuilder(ref); + } + else { + ref = new document_reference_1.DocumentReference(this, path_1.QualifiedResourcePath.fromSlashSeparatedString(documentOrName.name)); + document = new document_1.DocumentSnapshotBuilder(ref); + document.fieldsProto = documentOrName.fields + ? convertFields(documentOrName.fields) + : {}; + document.createTime = timestamp_1.Timestamp.fromProto(convertTimestamp(documentOrName.createTime, 'documentOrName.createTime')); + document.updateTime = timestamp_1.Timestamp.fromProto(convertTimestamp(documentOrName.updateTime, 'documentOrName.updateTime')); + } + if (readTime) { + document.readTime = timestamp_1.Timestamp.fromProto(convertTimestamp(readTime, 'readTime')); + } + return document.build(); + } + /** + * Creates a new `BundleBuilder` instance to package selected Firestore data into + * a bundle. + * + * @param bundleId. The id of the bundle. When loaded on clients, client SDKs use this id + * and the timestamp associated with the built bundle to tell if it has been loaded already. + * If not specified, a random identifier will be used. + */ + bundle(name) { + return new bundle_1.BundleBuilder(name || (0, util_1.autoId)()); + } + /** + * Function executed by {@link Firestore#runTransaction} within the transaction + * context. + * + * @callback Firestore~updateFunction + * @template T + * @param {Transaction} transaction The transaction object for this + * transaction. + * @returns {Promise} The promise returned at the end of the transaction. + * This promise will be returned by {@link Firestore#runTransaction} if the + * transaction completed successfully. + */ + /** + * Options object for {@link Firestore#runTransaction} to configure a + * read-only transaction. + * + * @param {true} readOnly Set to true to indicate a read-only transaction. + * @param {Timestamp=} readTime If specified, documents are read at the given + * time. This may not be more than 60 seconds in the past from when the + * request is processed by the server. + * @typedef {Object} Firestore~ReadOnlyTransactionOptions + */ + /** + * Options object for {@link Firestore#runTransaction} to configure a + * read-write transaction. + * + * @param {false=} readOnly Set to false or omit to indicate a read-write + * transaction. + * @param {number=} maxAttempts The maximum number of attempts for this + * transaction. Defaults to 5. + * @typedef {Object} Firestore~ReadWriteTransactionOptions + */ + /** + * Executes the given updateFunction and commits the changes applied within + * the transaction. + * + * You can use the transaction object passed to 'updateFunction' to read and + * modify Firestore documents under lock. You have to perform all reads before + * before you perform any write. + * + * Transactions can be performed as read-only or read-write transactions. By + * default, transactions are executed in read-write mode. + * + * A read-write transaction obtains a pessimistic lock on all documents that + * are read during the transaction. These locks block other transactions, + * batched writes, and other non-transactional writes from changing that + * document. Any writes in a read-write transactions are committed once + * 'updateFunction' resolves, which also releases all locks. 
+ * + * If a read-write transaction fails with contention, the transaction is + * retried up to five times. The `updateFunction` is invoked once for each + * attempt. + * + * Read-only transactions do not lock documents. They can be used to read + * documents at a consistent snapshot in time, which may be up to 60 seconds + * in the past. Read-only transactions are not retried. + * + * Transactions time out after 60 seconds if no documents are read. + * Transactions that are not committed within than 270 seconds are also + * aborted. Any remaining locks are released when a transaction times out. + * + * @template T + * @param {Firestore~updateFunction} updateFunction The user function to + * execute within the transaction context. + * @param { + * Firestore~ReadWriteTransactionOptions|Firestore~ReadOnlyTransactionOptions= + * } transactionOptions Transaction options. + * @returns {Promise} If the transaction completed successfully or was + * explicitly aborted (by the updateFunction returning a failed Promise), the + * Promise returned by the updateFunction will be returned here. Else if the + * transaction failed, a rejected Promise with the corresponding failure + * error will be returned. + * + * @example + * ``` + * let counterTransaction = firestore.runTransaction(transaction => { + * let documentRef = firestore.doc('col/doc'); + * return transaction.get(documentRef).then(doc => { + * if (doc.exists) { + * let count = doc.get('count') || 0; + * if (count > 10) { + * return Promise.reject('Reached maximum count'); + * } + * transaction.update(documentRef, { count: ++count }); + * return Promise.resolve(count); + * } + * + * transaction.create(documentRef, { count: 1 }); + * return Promise.resolve(1); + * }); + * }); + * + * counterTransaction.then(res => { + * console.log(`Count updated to ${res}`); + * }); + * ``` + */ + runTransaction(updateFunction, transactionOptions) { + (0, validate_1.validateFunction)('updateFunction', updateFunction); + const tag = (0, util_1.requestTag)(); + if (transactionOptions) { + (0, validate_1.validateObject)('transactionOptions', transactionOptions); + (0, validate_1.validateBoolean)('transactionOptions.readOnly', transactionOptions.readOnly, { optional: true }); + if (transactionOptions.readOnly) { + (0, validate_1.validateTimestamp)('transactionOptions.readTime', transactionOptions.readTime, { optional: true }); + } + else { + (0, validate_1.validateInteger)('transactionOptions.maxAttempts', transactionOptions.maxAttempts, { optional: true, minValue: 1 }); + } + } + const transaction = new transaction_1.Transaction(this, tag, transactionOptions); + return this.initializeIfNeeded(tag).then(() => transaction.runTransaction(updateFunction)); + } + /** + * Fetches the root collections that are associated with this Firestore + * database. + * + * @returns {Promise.>} A Promise that resolves + * with an array of CollectionReferences. + * + * @example + * ``` + * firestore.listCollections().then(collections => { + * for (let collection of collections) { + * console.log(`Found collection with id: ${collection.id}`); + * } + * }); + * ``` + */ + listCollections() { + const rootDocument = new document_reference_1.DocumentReference(this, path_1.ResourcePath.EMPTY); + return rootDocument.listCollections(); + } + /** + * Retrieves multiple documents from Firestore. + * + * The first argument is required and must be of type `DocumentReference` + * followed by any additional `DocumentReference` documents. 
If used, the + * optional `ReadOptions` must be the last argument. + * + * @param {...DocumentReference|ReadOptions} documentRefsOrReadOptions The + * `DocumentReferences` to receive, followed by an optional field mask. + * @returns {Promise>} A Promise that + * contains an array with the resulting document snapshots. + * + * @example + * ``` + * let docRef1 = firestore.doc('col/doc1'); + * let docRef2 = firestore.doc('col/doc2'); + * + * firestore.getAll(docRef1, docRef2, { fieldMask: ['user'] }).then(docs => { + * console.log(`First document: ${JSON.stringify(docs[0])}`); + * console.log(`Second document: ${JSON.stringify(docs[1])}`); + * }); + * ``` + */ + getAll(...documentRefsOrReadOptions) { + return this._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_BATCH_GET_DOCUMENTS, () => { + (0, validate_1.validateMinNumberOfArguments)('Firestore.getAll', documentRefsOrReadOptions, 1); + const { documents, fieldMask } = (0, transaction_1.parseGetAllArguments)(documentRefsOrReadOptions); + this._traceUtil.currentSpan().setAttributes({ + [trace_util_1.ATTRIBUTE_KEY_IS_TRANSACTIONAL]: false, + [trace_util_1.ATTRIBUTE_KEY_DOC_COUNT]: documents.length, + }); + const tag = (0, util_1.requestTag)(); + // Capture the error stack to preserve stack tracing across async calls. + const stack = Error().stack; + return this.initializeIfNeeded(tag) + .then(() => { + const reader = new document_reader_1.DocumentReader(this, documents, fieldMask); + return reader.get(tag); + }) + .catch(err => { + throw (0, util_1.wrapError)(err, stack); + }); + }); + } + /** + * Registers a listener on this client, incrementing the listener count. This + * is used to verify that all listeners are unsubscribed when terminate() is + * called. + * + * @private + * @internal + */ + registerListener() { + this.registeredListenersCount += 1; + } + /** + * Unregisters a listener on this client, decrementing the listener count. + * This is used to verify that all listeners are unsubscribed when terminate() + * is called. + * + * @private + * @internal + */ + unregisterListener() { + this.registeredListenersCount -= 1; + } + /** + * Increments the number of open BulkWriter instances. This is used to verify + * that all pending operations are complete when terminate() is called. + * + * @private + * @internal + */ + _incrementBulkWritersCount() { + this.bulkWritersCount += 1; + } + /** + * Decrements the number of open BulkWriter instances. This is used to verify + * that all pending operations are complete when terminate() is called. + * + * @private + * @internal + */ + _decrementBulkWritersCount() { + this.bulkWritersCount -= 1; + } + /** + * Recursively deletes all documents and subcollections at and under the + * specified level. + * + * If any delete fails, the promise is rejected with an error message + * containing the number of failed deletes and the stack trace of the last + * failed delete. The provided reference is deleted regardless of whether + * all deletes succeeded. + * + * `recursiveDelete()` uses a BulkWriter instance with default settings to + * perform the deletes. To customize throttling rates or add success/error + * callbacks, pass in a custom BulkWriter instance. + * + * @param ref The reference of a document or collection to delete. + * @param bulkWriter A custom BulkWriter instance used to perform the + * deletes. + * @return A promise that resolves when all deletes have been performed. + * The promise is rejected if any of the deletes fail. 
+ * + * @example + * ``` + * // Recursively delete a reference and log the references of failures. + * const bulkWriter = firestore.bulkWriter(); + * bulkWriter + * .onWriteError((error) => { + * if ( + * error.failedAttempts < MAX_RETRY_ATTEMPTS + * ) { + * return true; + * } else { + * console.log('Failed write at document: ', error.documentRef.path); + * return false; + * } + * }); + * await firestore.recursiveDelete(docRef, bulkWriter); + * ``` + */ + recursiveDelete(ref, bulkWriter) { + return this._recursiveDelete(ref, recursive_delete_1.RECURSIVE_DELETE_MAX_PENDING_OPS, recursive_delete_1.RECURSIVE_DELETE_MIN_PENDING_OPS, bulkWriter); + } + /** + * This overload is not private in order to test the query resumption with + * startAfter() once the RecursiveDelete instance has MAX_PENDING_OPS pending. + * + * @private + * @internal + */ + // Visible for testing + _recursiveDelete(ref, maxPendingOps, minPendingOps, bulkWriter) { + const writer = bulkWriter !== null && bulkWriter !== void 0 ? bulkWriter : this.getBulkWriter(); + const deleter = new recursive_delete_1.RecursiveDelete(this, writer, ref, maxPendingOps, minPendingOps); + return deleter.run(); + } + /** + * Terminates the Firestore client and closes all open streams. + * + * @return A Promise that resolves when the client is terminated. + */ + terminate() { + if (this.registeredListenersCount > 0 || this.bulkWritersCount > 0) { + return Promise.reject('All onSnapshot() listeners must be unsubscribed, and all BulkWriter ' + + 'instances must be closed before terminating the client. ' + + `There are ${this.registeredListenersCount} active listeners and ` + + `${this.bulkWritersCount} open BulkWriter instances.`); + } + return this._clientPool.terminate(); + } + /** + * Returns the Project ID to serve as the JSON representation of this + * Firestore instance. + * + * @return An object that contains the project ID (or `undefined` if not yet + * available). + */ + toJSON() { + return { projectId: this._projectId }; + } + /** + * Initializes the client if it is not already initialized. All methods in the + * SDK can be used after this method completes. + * + * @private + * @internal + * @param requestTag A unique client-assigned identifier that caused this + * initialization. + * @return A Promise that resolves when the client is initialized. + */ + async initializeIfNeeded(requestTag) { + this._settingsFrozen = true; + if (this._settings.ssl === false) { + // If SSL is false, we assume that we are talking to the emulator. We + // provide an Authorization header by default so that the connection is + // recognized as admin in Firestore Emulator. (If for some reason we're + // not connecting to the emulator, then this will result in denials with + // invalid token, rather than behave like clients not logged in. The user + // can then provide their own Authorization header, which will take + // precedence). + this._settings.customHeaders = { + Authorization: 'Bearer owner', + ...this._settings.customHeaders, + }; + } + if (this._projectId === undefined) { + try { + this._projectId = await this._clientPool.run(requestTag, + /* requiresGrpc= */ false, gapicClient => gapicClient.getProjectId()); + (0, logger_1.logger)('Firestore.initializeIfNeeded', null, 'Detected project ID: %s', this._projectId); + } + catch (err) { + (0, logger_1.logger)('Firestore.initializeIfNeeded', null, 'Failed to detect project ID: %s', err); + return Promise.reject(err); + } + } + } + /** + * Returns GAX call options that set the cloud resource header. 
+ * @private + * @internal + */ + createCallOptions(methodName, retryCodes) { + var _a; + const callOptions = { + otherArgs: { + headers: { + [CLOUD_RESOURCE_HEADER]: this.formattedName, + ...this._settings.customHeaders, + ...(_a = this._settings[methodName]) === null || _a === void 0 ? void 0 : _a.customHeaders, + }, + }, + }; + if (retryCodes) { + const retryParams = (0, util_1.getRetryParams)(methodName); + callOptions.retry = + new ((__nccwpck_require__(90418).RetryOptions))(retryCodes, retryParams); + } + return callOptions; + } + /** + * A function returning a Promise that can be retried. + * + * @private + * @internal + * @callback retryFunction + * @returns {Promise} A Promise indicating the function's success. + */ + /** + * Helper method that retries failed Promises. + * + * If 'delayMs' is specified, waits 'delayMs' between invocations. Otherwise, + * schedules the first attempt immediately, and then waits 100 milliseconds + * for further attempts. + * + * @private + * @internal + * @param methodName Name of the Veneer API endpoint that takes a request + * and GAX options. + * @param requestTag A unique client-assigned identifier for this request. + * @param func Method returning a Promise than can be retried. + * @returns A Promise with the function's result if successful within + * `attemptsRemaining`. Otherwise, returns the last rejected Promise. + */ + async _retry(methodName, requestTag, func) { + const backoff = new backoff_1.ExponentialBackoff(); + let lastError = undefined; + for (let attempt = 0; attempt < exports.MAX_REQUEST_RETRIES; ++attempt) { + if (lastError) { + (0, logger_1.logger)('Firestore._retry', requestTag, 'Retrying request that failed with error:', lastError); + } + try { + await backoff.backoffAndWait(); + return await func(); + } + catch (err) { + lastError = err; + if ((0, util_1.isPermanentRpcError)(err, methodName)) { + break; + } + } + } + (0, logger_1.logger)('Firestore._retry', requestTag, 'Request failed with error:', lastError); + return Promise.reject(lastError); + } + /** + * Waits for the provided stream to become active and returns a paused but + * healthy stream. If an error occurs before the first byte is read, the + * method rejects the returned Promise. + * + * @private + * @internal + * @param backendStream The Node stream to monitor. + * @param lifetime A Promise that resolves when the stream receives an 'end', + * 'close' or 'finish' message. + * @param requestTag A unique client-assigned identifier for this request. + * @param request If specified, the request that should be written to the + * stream after opening. + * @returns A guaranteed healthy stream that should be used instead of + * `backendStream`. + */ + _initializeStream(backendStream, lifetime, requestTag, request) { + const resultStream = new stream_1.PassThrough({ objectMode: true }); + resultStream.pause(); + /** + * Whether we have resolved the Promise and returned the stream to the + * caller. 
+ */ + let streamInitialized = false; + return new Promise((resolve, reject) => { + function streamReady() { + if (!streamInitialized) { + streamInitialized = true; + (0, logger_1.logger)('Firestore._initializeStream', requestTag, 'Stream ready'); + resolve(resultStream); + } + } + function streamEnded() { + (0, logger_1.logger)('Firestore._initializeStream', requestTag, 'Received stream end'); + resultStream.unpipe(backendStream); + resolve(resultStream); + lifetime.resolve(); + } + function streamFailed(err) { + if (!streamInitialized) { + // If we receive an error before we were able to receive any data, + // reject this stream. + (0, logger_1.logger)('Firestore._initializeStream', requestTag, 'Received initial error:', err); + reject(err); + } + else { + (0, logger_1.logger)('Firestore._initializeStream', requestTag, 'Received stream error:', err); + // We execute the forwarding of the 'error' event via setImmediate() as + // V8 guarantees that the Promise chain returned from this method + // is resolved before any code executed via setImmediate(). This + // allows the caller to attach an error handler. + setImmediate(() => { + resultStream.emit('error', err); + }); + } + } + backendStream.on('data', () => streamReady()); + backendStream.on('error', err => streamFailed(err)); + backendStream.on('end', () => streamEnded()); + backendStream.on('close', () => streamEnded()); + backendStream.on('finish', () => streamEnded()); + backendStream.pipe(resultStream); + if (request) { + (0, logger_1.logger)('Firestore._initializeStream', requestTag, 'Sending request: %j', request); + backendStream.write(request, 'utf-8', err => { + if (err) { + streamFailed(err); + } + else { + (0, logger_1.logger)('Firestore._initializeStream', requestTag, 'Marking stream as healthy'); + streamReady(); + } + }); + } + }); + } + /** + * A funnel for all non-streaming API requests, assigning a project ID where + * necessary within the request options. + * + * @private + * @internal + * @param methodName Name of the Veneer API endpoint that takes a request + * and GAX options. + * @param request The Protobuf request to send. + * @param requestTag A unique client-assigned identifier for this request. + * @param retryCodes If provided, a custom list of retry codes. If not + * provided, retry is based on the behavior as defined in the ServiceConfig. + * @returns A Promise with the request result. + */ + request(methodName, request, requestTag, retryCodes) { + const callOptions = this.createCallOptions(methodName, retryCodes); + return this._clientPool.run(requestTag, + /* requiresGrpc= */ false, async (gapicClient) => { + try { + (0, logger_1.logger)('Firestore.request', requestTag, 'Sending request: %j', request); + const [result] = await gapicClient[methodName](request, callOptions); + (0, logger_1.logger)('Firestore.request', requestTag, 'Received response: %j', result); + return result; + } + catch (err) { + (0, logger_1.logger)('Firestore.request', requestTag, 'Received error:', err); + return Promise.reject(err); + } + }); + } + /** + * A funnel for streaming API requests, assigning a project ID where necessary + * within the request options. + * + * The stream is returned in paused state and needs to be resumed once all + * listeners are attached. + * + * @private + * @internal + * @param methodName Name of the streaming Veneer API endpoint that + * takes a request and GAX options. 
+ * @param bidrectional Whether the request is bidirectional (true) or + * unidirectional (false_ + * @param request The Protobuf request to send. + * @param requestTag A unique client-assigned identifier for this request. + * @returns A Promise with the resulting read-only stream. + */ + requestStream(methodName, bidrectional, request, requestTag) { + const callOptions = this.createCallOptions(methodName); + const bidirectional = methodName === 'listen'; + let numResponses = 0; + const NUM_RESPONSES_PER_TRACE_EVENT = 100; + return this._retry(methodName, requestTag, () => { + const result = new util_1.Deferred(); + this._clientPool.run(requestTag, bidrectional, async (gapicClient) => { + (0, logger_1.logger)('Firestore.requestStream', requestTag, 'Sending request: %j', request); + this._traceUtil + .currentSpan() + .addEvent(`Firestore.${methodName}: Start`); + try { + const stream = bidirectional + ? gapicClient[methodName](callOptions) + : gapicClient[methodName](request, callOptions); + const logStream = new stream_1.Transform({ + objectMode: true, + transform: (chunk, encoding, callback) => { + (0, logger_1.logger)('Firestore.requestStream', requestTag, 'Received response: %j', chunk); + numResponses++; + if (numResponses === 1) { + this._traceUtil + .currentSpan() + .addEvent(`Firestore.${methodName}: First response received`); + } + else if (numResponses % NUM_RESPONSES_PER_TRACE_EVENT === 0) { + this._traceUtil + .currentSpan() + .addEvent(`Firestore.${methodName}: Received ${numResponses} responses`); + } + callback(); + }, + }); + stream.pipe(logStream); + const lifetime = new util_1.Deferred(); + const resultStream = await this._initializeStream(stream, lifetime, requestTag, bidirectional ? request : undefined); + resultStream.on('end', () => { + stream.end(); + this._traceUtil + .currentSpan() + .addEvent(`Firestore.${methodName}: Completed`, { + [trace_util_1.ATTRIBUTE_KEY_NUM_RESPONSES]: numResponses, + }); + }); + result.resolve(resultStream); + // While we return the stream to the callee early, we don't want to + // release the GAPIC client until the callee has finished processing the + // stream. + return lifetime.promise; + } + catch (e) { + result.reject(e); + } + }); + return result.promise; + }); + } } +exports.Firestore = Firestore; +/** + * A logging function that takes a single string. + * + * @callback Firestore~logFunction + * @param {string} Log message + */ +// tslint:disable-next-line:no-default-export +/** + * The default export of the `@google-cloud/firestore` package is the + * {@link Firestore} class. + * + * See {@link Firestore} and {@link ClientConfig} for client methods and + * configuration options. 
+ * + * @module {Firestore} @google-cloud/firestore + * @alias nodejs-firestore + * + * @example Install the client library with npm: + * ``` + * npm install --save @google-cloud/firestore + * + * ``` + * @example Import the client library + * ``` + * var Firestore = require('@google-cloud/firestore'); + * + * ``` + * @example Create a client that uses Application Default Credentials (ADC): + * ``` + * var firestore = new Firestore(); + * + * ``` + * @example Create a client with explicit credentials: + * ``` + * var firestore = new Firestore({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * + * ``` + * @example include:samples/quickstart.js + * region_tag:firestore_quickstart + * Full quickstart example: + */ +// tslint:disable-next-line:no-default-export +exports["default"] = Firestore; +// Horrible hack to ensure backwards compatibility with <= 17.0, which allows +// users to call the default constructor via +// `const Fs = require(`@google-cloud/firestore`); new Fs()`; +const existingExports = module.exports; +module.exports = Firestore; +module.exports = Object.assign(module.exports, existingExports); +/** + * {@link v1beta1} factory function. + * + * @private + * @internal + * @name Firestore.v1beta1 + * @type {function} + */ +Object.defineProperty(module.exports, "v1beta1", ({ + // The v1beta1 module is very large. To avoid pulling it in from static + // scope, we lazy-load the module. + get: () => __nccwpck_require__(92170), +})); +/** + * {@link v1} factory function. + * + * @private + * @internal + * @name Firestore.v1 + * @type {function} + */ +Object.defineProperty(module.exports, "v1", ({ + // The v1 module is very large. To avoid pulling it in from static + // scope, we lazy-load the module. + get: () => __nccwpck_require__(31644), +})); +/** + * {@link Status} factory function. + * + * @private + * @internal + * @name Firestore.GrpcStatus + * @type {function} + */ +Object.defineProperty(module.exports, "GrpcStatus", ({ + // The gax module is very large. To avoid pulling it in from static + // scope, we lazy-load the module. + get: () => (__nccwpck_require__(12263).Status), +})); +//# sourceMappingURL=index.js.map -// pkg/dist-src/index.js -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; +/***/ }), + +/***/ 42718: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2018 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = logger; +exports.setLogFunction = setLogFunction; +exports.setLibVersion = setLibVersion; +const util = __nccwpck_require__(73837); +const validate_1 = __nccwpck_require__(33822); +/*! The Firestore library version */ +let libVersion; +/*! The external function used to emit logs. 
*/ +let logFunction = null; +/** + * Log function to use for debug output. By default, we don't perform any + * logging. + * + * @private + * @internal + */ +function logger(methodName, requestTag, logMessage, ...additionalArgs) { + requestTag = requestTag || '#####'; + if (logFunction) { + const formattedMessage = util.format(logMessage, ...additionalArgs); + const time = new Date().toISOString(); + logFunction(`Firestore (${libVersion}) ${time} ${requestTag} [${methodName}]: ` + + formattedMessage); + } } -paginateRest.VERSION = VERSION; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); +/** + * Sets or disables the log function for all active Firestore instances. + * + * @param logger A log function that takes a message (such as `console.log`) or + * `null` to turn off logging. + */ +function setLogFunction(logger) { + if (logger !== null) + (0, validate_1.validateFunction)('logger', logger); + logFunction = logger; +} +/** + * Sets the library version to be used in log messages. + * + * @private + * @internal + */ +function setLibVersion(version) { + libVersion = version; +} +//# sourceMappingURL=logger.js.map + +/***/ }), + +/***/ 16723: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +/*! + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.VECTOR_MAP_VECTORS_KEY = exports.RESERVED_MAP_KEY_VECTOR_VALUE = exports.RESERVED_MAP_KEY = void 0; +exports.RESERVED_MAP_KEY = '__type__'; +exports.RESERVED_MAP_KEY_VECTOR_VALUE = '__vector__'; +exports.VECTOR_MAP_VECTORS_KEY = 'value'; +//# sourceMappingURL=map-type.js.map /***/ }), -/***/ 83044: -/***/ ((module) => { +/***/ 66849: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); +/*! + * Copyright 2017 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.primitiveComparator = primitiveComparator; +exports.compareArrays = compareArrays; +exports.compare = compare; +const convert_1 = __nccwpck_require__(36674); +const path_1 = __nccwpck_require__(34908); +/*! + * The type order as defined by the backend. + */ +var TypeOrder; +(function (TypeOrder) { + TypeOrder[TypeOrder["NULL"] = 0] = "NULL"; + TypeOrder[TypeOrder["BOOLEAN"] = 1] = "BOOLEAN"; + TypeOrder[TypeOrder["NUMBER"] = 2] = "NUMBER"; + TypeOrder[TypeOrder["TIMESTAMP"] = 3] = "TIMESTAMP"; + TypeOrder[TypeOrder["STRING"] = 4] = "STRING"; + TypeOrder[TypeOrder["BLOB"] = 5] = "BLOB"; + TypeOrder[TypeOrder["REF"] = 6] = "REF"; + TypeOrder[TypeOrder["GEO_POINT"] = 7] = "GEO_POINT"; + TypeOrder[TypeOrder["ARRAY"] = 8] = "ARRAY"; + TypeOrder[TypeOrder["VECTOR"] = 9] = "VECTOR"; + TypeOrder[TypeOrder["OBJECT"] = 10] = "OBJECT"; +})(TypeOrder || (TypeOrder = {})); +/*! + * @private + * @internal + */ +function typeOrder(val) { + const valueType = (0, convert_1.detectValueType)(val); + switch (valueType) { + case 'nullValue': + return TypeOrder.NULL; + case 'integerValue': + return TypeOrder.NUMBER; + case 'doubleValue': + return TypeOrder.NUMBER; + case 'stringValue': + return TypeOrder.STRING; + case 'booleanValue': + return TypeOrder.BOOLEAN; + case 'arrayValue': + return TypeOrder.ARRAY; + case 'timestampValue': + return TypeOrder.TIMESTAMP; + case 'geoPointValue': + return TypeOrder.GEO_POINT; + case 'bytesValue': + return TypeOrder.BLOB; + case 'referenceValue': + return TypeOrder.REF; + case 'mapValue': + return TypeOrder.OBJECT; + case 'vectorValue': + return TypeOrder.VECTOR; + default: + throw new Error('Unexpected value type: ' + valueType); + } +} +/*! + * @private + * @internal + */ +function primitiveComparator(left, right) { + if (left < right) { + return -1; + } + if (left > right) { + return 1; + } + return 0; +} +/*! + * Utility function to compare doubles (using Firestore semantics for NaN). + * @private + * @internal + */ +function compareNumbers(left, right) { + if (left < right) { + return -1; + } + if (left > right) { + return 1; + } + if (left === right) { + return 0; + } + // one or both are NaN. + if (isNaN(left)) { + return isNaN(right) ? 0 : -1; + } + return 1; +} +/*! + * @private + * @internal + */ +function compareNumberProtos(left, right) { + let leftValue, rightValue; + if (left.integerValue !== undefined) { + leftValue = Number(left.integerValue); + } + else { + leftValue = Number(left.doubleValue); + } + if (right.integerValue !== undefined) { + rightValue = Number(right.integerValue); + } + else { + rightValue = Number(right.doubleValue); + } + return compareNumbers(leftValue, rightValue); +} +/*! + * @private + * @internal + */ +function compareTimestamps(left, right) { + const seconds = primitiveComparator(left.seconds || 0, right.seconds || 0); + if (seconds !== 0) { + return seconds; + } + return primitiveComparator(left.nanos || 0, right.nanos || 0); +} +/*! 
+ * @private + * @internal + */ +function compareBlobs(left, right) { + if (!(left instanceof Buffer) || !(right instanceof Buffer)) { + throw new Error('Blobs can only be compared if they are Buffers.'); + } + return Buffer.compare(left, right); +} +/*! + * @private + * @internal + */ +function compareReferenceProtos(left, right) { + const leftPath = path_1.QualifiedResourcePath.fromSlashSeparatedString(left.referenceValue); + const rightPath = path_1.QualifiedResourcePath.fromSlashSeparatedString(right.referenceValue); + return leftPath.compareTo(rightPath); +} +/*! + * @private + * @internal + */ +function compareGeoPoints(left, right) { + return (primitiveComparator(left.latitude || 0, right.latitude || 0) || + primitiveComparator(left.longitude || 0, right.longitude || 0)); +} +/*! + * @private + * @internal + */ +function compareArrays(left, right) { + for (let i = 0; i < left.length && i < right.length; i++) { + const valueComparison = compare(left[i], right[i]); + if (valueComparison !== 0) { + return valueComparison; + } + } + // If all the values matched so far, just check the length. + return primitiveComparator(left.length, right.length); +} +/*! + * @private + * @internal + */ +function compareObjects(left, right) { + // This requires iterating over the keys in the object in order and doing a + // deep comparison. + const leftKeys = Object.keys(left); + const rightKeys = Object.keys(right); + leftKeys.sort(); + rightKeys.sort(); + for (let i = 0; i < leftKeys.length && i < rightKeys.length; i++) { + const keyComparison = primitiveComparator(leftKeys[i], rightKeys[i]); + if (keyComparison !== 0) { + return keyComparison; + } + const key = leftKeys[i]; + const valueComparison = compare(left[key], right[key]); + if (valueComparison !== 0) { + return valueComparison; + } + } + // If all the keys matched so far, just check the length. + return primitiveComparator(leftKeys.length, rightKeys.length); +} +/*! + * @private + * @internal + */ +function compareVectors(left, right) { + var _a, _b, _c, _d, _e, _f; + // The vector is a map, but only vector value is compared. + const leftArray = (_c = (_b = (_a = left === null || left === void 0 ? void 0 : left['value']) === null || _a === void 0 ? void 0 : _a.arrayValue) === null || _b === void 0 ? void 0 : _b.values) !== null && _c !== void 0 ? _c : []; + const rightArray = (_f = (_e = (_d = right === null || right === void 0 ? void 0 : right['value']) === null || _d === void 0 ? void 0 : _d.arrayValue) === null || _e === void 0 ? void 0 : _e.values) !== null && _f !== void 0 ? _f : []; + const lengthCompare = primitiveComparator(leftArray.length, rightArray.length); + if (lengthCompare !== 0) { + return lengthCompare; + } + return compareArrays(leftArray, rightArray); +} +/*! + * @private + * @internal + */ +function compare(left, right) { + // First compare the types. + const leftType = typeOrder(left); + const rightType = typeOrder(right); + const typeComparison = primitiveComparator(leftType, rightType); + if (typeComparison !== 0) { + return typeComparison; + } + // So they are the same type. + switch (leftType) { + case TypeOrder.NULL: + // Nulls are all equal. 
+ return 0; + case TypeOrder.BOOLEAN: + return primitiveComparator(left.booleanValue, right.booleanValue); + case TypeOrder.STRING: + return primitiveComparator(left.stringValue, right.stringValue); + case TypeOrder.NUMBER: + return compareNumberProtos(left, right); + case TypeOrder.TIMESTAMP: + return compareTimestamps(left.timestampValue, right.timestampValue); + case TypeOrder.BLOB: + return compareBlobs(left.bytesValue, right.bytesValue); + case TypeOrder.REF: + return compareReferenceProtos(left, right); + case TypeOrder.GEO_POINT: + return compareGeoPoints(left.geoPointValue, right.geoPointValue); + case TypeOrder.ARRAY: + return compareArrays(left.arrayValue.values || [], right.arrayValue.values || []); + case TypeOrder.OBJECT: + return compareObjects(left.mapValue.fields || {}, right.mapValue.fields || {}); + case TypeOrder.VECTOR: + return compareVectors(left.mapValue.fields || {}, right.mapValue.fields || {}); + default: + throw new Error(`Encountered unknown type order: ${leftType}`); + } +} +//# sourceMappingURL=order.js.map -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - legacyRestEndpointMethods: () => legacyRestEndpointMethods, - restEndpointMethods: () => restEndpointMethods -}); -module.exports = __toCommonJS(dist_src_exports); +/***/ }), -// pkg/dist-src/version.js -var VERSION = "10.2.0"; +/***/ 34908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// pkg/dist-src/generated/endpoints.js -var Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: [ - "POST /orgs/{org}/actions/runners/{runner_id}/labels" - ], - addCustomLabelsToSelfHostedRunnerForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" - ], - addSelectedRepoToOrgVariable: [ - "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" - ], - approveWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" - ], - cancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" - ], - createEnvironmentVariable: [ - "POST /repositories/{repository_id}/environments/{environment_name}/variables" - ], - createOrUpdateEnvironmentSecret: [ - "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - createOrgVariable: ["POST /orgs/{org}/actions/variables"], - createRegistrationTokenForOrg: [ - "POST /orgs/{org}/actions/runners/registration-token" - ], - createRegistrationTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/registration-token" - ], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/remove-token" - ], - createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], - createWorkflowDispatch: [ - "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" - ], - deleteActionsCacheById: [ - "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" - ], - deleteActionsCacheByKey: [ - "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" - ], - deleteArtifact: [ - "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" - ], - deleteEnvironmentSecret: [ - "DELETE 
/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - deleteEnvironmentVariable: [ - "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - deleteRepoVariable: [ - "DELETE /repos/{owner}/{repo}/actions/variables/{name}" - ], - deleteSelfHostedRunnerFromOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}" - ], - deleteSelfHostedRunnerFromRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" - ], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: [ - "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" - ], - disableSelectedRepositoryGithubActionsOrganization: [ - "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" - ], - disableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" - ], - downloadArtifact: [ - "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" - ], - downloadJobLogsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" - ], - downloadWorkflowRunAttemptLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" - ], - downloadWorkflowRunLogs: [ +"use strict"; + +/*! + * Copyright 2017 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FieldPath = exports.QualifiedResourcePath = exports.ResourcePath = exports.DEFAULT_DATABASE_ID = void 0; +exports.validateResourcePath = validateResourcePath; +exports.validateFieldPath = validateFieldPath; +const util_1 = __nccwpck_require__(15468); +const validate_1 = __nccwpck_require__(33822); +/*! + * The default database ID for this Firestore client. We do not yet expose the + * ability to use different databases. + */ +exports.DEFAULT_DATABASE_ID = '(default)'; +/*! + * A regular expression to verify an absolute Resource Path in Firestore. It + * extracts the project ID, the database name and the relative resource path + * if available. + * + * @type {RegExp} + */ +const RESOURCE_PATH_RE = +// Note: [\s\S] matches all characters including newlines. +/^projects\/([^/]*)\/databases\/([^/]*)(?:\/documents\/)?([\s\S]*)$/; +/*! + * A regular expression to verify whether a field name can be passed to the + * backend without escaping. + * + * @type {RegExp} + */ +const UNESCAPED_FIELD_NAME_RE = /^[_a-zA-Z][_a-zA-Z0-9]*$/; +/*! + * A regular expression to verify field paths that are passed to the API as + * strings. Field paths that do not match this expression have to be provided + * as a [FieldPath]{@link FieldPath} object. 
+ * + * @type {RegExp} + */ +const FIELD_PATH_RE = /^[^*~/[\]]+$/; +/** + * An abstract class representing a Firestore path. + * + * Subclasses have to implement `split()` and `canonicalString()`. + * + * @private + * @internal + * @class + */ +class Path { + /** + * Creates a new Path with the given segments. + * + * @private + * @internal + * @private + * @param segments Sequence of parts of a path. + */ + constructor(segments) { + this.segments = segments; + } + /** + * Returns the number of segments of this field path. + * + * @private + * @internal + */ + get size() { + return this.segments.length; + } + /** + * Create a child path beneath the current level. + * + * @private + * @internal + * @param relativePath Relative path to append to the current path. + * @returns The new path. + */ + append(relativePath) { + if (relativePath instanceof Path) { + return this.construct(this.segments.concat(relativePath.segments)); + } + return this.construct(this.segments.concat(this.split(relativePath))); + } + /** + * Returns the path of the parent node. + * + * @private + * @internal + * @returns The new path or null if we are already at the root. + */ + parent() { + if (this.segments.length === 0) { + return null; + } + return this.construct(this.segments.slice(0, this.segments.length - 1)); + } + /** + * Checks whether the current path is a prefix of the specified path. + * + * @private + * @internal + * @param other The path to check against. + * @returns 'true' iff the current path is a prefix match with 'other'. + */ + isPrefixOf(other) { + if (other.segments.length < this.segments.length) { + return false; + } + for (let i = 0; i < this.segments.length; i++) { + if (this.segments[i] !== other.segments[i]) { + return false; + } + } + return true; + } + /** + * Compare the current path against another Path object. + * + * @private + * @internal + * @param other The path to compare to. + * @returns -1 if current < other, 1 if current > other, 0 if equal + */ + compareTo(other) { + const len = Math.min(this.segments.length, other.segments.length); + for (let i = 0; i < len; i++) { + if (this.segments[i] < other.segments[i]) { + return -1; + } + if (this.segments[i] > other.segments[i]) { + return 1; + } + } + if (this.segments.length < other.segments.length) { + return -1; + } + if (this.segments.length > other.segments.length) { + return 1; + } + return 0; + } + /** + * Returns a copy of the underlying segments. + * + * @private + * @internal + * @returns A copy of the segments that make up this path. + */ + toArray() { + return this.segments.slice(); + } + /** + * Pops the last segment from this `Path` and returns a newly constructed + * `Path`. + * + * @private + * @internal + * @returns The newly created Path. + */ + popLast() { + this.segments.pop(); + return this.construct(this.segments); + } + /** + * Returns true if this `Path` is equal to the provided value. + * + * @private + * @internal + * @param other The value to compare against. + * @return true if this `Path` is equal to the provided value. + */ + isEqual(other) { + return this === other || this.compareTo(other) === 0; + } +} +/** + * A slash-separated path for navigating resources within the current Firestore + * instance. + * + * @private + * @internal + */ +class ResourcePath extends Path { + /** + * Constructs a ResourcePath. + * + * @private + * @internal + * @param segments Sequence of names of the parts of the path. 
+ */ + constructor(...segments) { + super(segments); + } + /** + * Indicates whether this path points to a document. + * @private + * @internal + */ + get isDocument() { + return this.segments.length > 0 && this.segments.length % 2 === 0; + } + /** + * Indicates whether this path points to a collection. + * @private + * @internal + */ + get isCollection() { + return this.segments.length % 2 === 1; + } + /** + * The last component of the path. + * @private + * @internal + */ + get id() { + if (this.segments.length > 0) { + return this.segments[this.segments.length - 1]; + } + return null; + } + /** + * Returns the location of this path relative to the root of the project's + * database. + * @private + * @internal + */ + get relativeName() { + return this.segments.join('/'); + } + /** + * Constructs a new instance of ResourcePath. + * + * @private + * @internal + * @param segments Sequence of parts of the path. + * @returns The newly created ResourcePath. + */ + construct(segments) { + return new ResourcePath(...segments); + } + /** + * Splits a string into path segments, using slashes as separators. + * + * @private + * @internal + * @param relativePath The path to split. + * @returns The split path segments. + */ + split(relativePath) { + // We may have an empty segment at the beginning or end if they had a + // leading or trailing slash (which we allow). + return relativePath.split('/').filter(segment => segment.length > 0); + } + /** + * Converts this path to a fully qualified ResourcePath. + * + * @private + * @internal + * @param projectId The project ID of the current Firestore project. + * @return A fully-qualified resource path pointing to the same element. + */ + toQualifiedResourcePath(projectId, databaseId) { + return new QualifiedResourcePath(projectId, databaseId, ...this.segments); + } +} +exports.ResourcePath = ResourcePath; +/** + * A default instance pointing to the root collection. + * @private + * @internal + */ +ResourcePath.EMPTY = new ResourcePath(); +/** + * A slash-separated path that includes a project and database ID for referring + * to resources in any Firestore project. + * + * @private + * @internal + */ +class QualifiedResourcePath extends ResourcePath { + /** + * Constructs a Firestore Resource Path. + * + * @private + * @internal + * @param projectId The Firestore project id. + * @param databaseId The Firestore database id. + * @param segments Sequence of names of the parts of the path. + */ + constructor(projectId, databaseId, ...segments) { + super(...segments); + this.projectId = projectId; + this.databaseId = databaseId; + } + /** + * String representation of the path relative to the database root. + * @private + * @internal + */ + get relativeName() { + return this.segments.join('/'); + } + /** + * Creates a resource path from an absolute Firestore path. + * + * @private + * @internal + * @param absolutePath A string representation of a Resource Path. + * @returns The new ResourcePath. + */ + static fromSlashSeparatedString(absolutePath) { + const elements = RESOURCE_PATH_RE.exec(absolutePath); + if (elements) { + const project = elements[1]; + const database = elements[2]; + const path = elements[3]; + return new QualifiedResourcePath(project, database).append(path); + } + throw new Error(`Resource name '${absolutePath}' is not valid.`); + } + /** + * Create a child path beneath the current level. + * + * @private + * @internal + * @param relativePath Relative path to append to the current path. + * @returns The new path. 
+ */ + append(relativePath) { + // `super.append()` calls `QualifiedResourcePath.construct()` when invoked + // from here and returns a QualifiedResourcePath. + return super.append(relativePath); + } + /** + * Create a child path beneath the current level. + * + * @private + * @internal + * @returns The new path. + */ + parent() { + return super.parent(); + } + /** + * String representation of a ResourcePath as expected by the API. + * + * @private + * @internal + * @returns The representation as expected by the API. + */ + get formattedName() { + const components = [ + 'projects', + this.projectId, + 'databases', + this.databaseId, + 'documents', + ...this.segments, + ]; + return components.join('/'); + } + /** + * Constructs a new instance of ResourcePath. We need this instead of using + * the normal constructor because polymorphic 'this' doesn't work on static + * methods. + * + * @private + * @internal + * @param segments Sequence of names of the parts of the path. + * @returns The newly created QualifiedResourcePath. + */ + construct(segments) { + return new QualifiedResourcePath(this.projectId, this.databaseId, ...segments); + } + /** + * Convenience method to match the ResourcePath API. This method always + * returns the current instance. + * + * @private + * @internal + */ + toQualifiedResourcePath() { + return this; + } + /** + * Compare the current path against another ResourcePath object. + * + * @private + * @internal + * @param other The path to compare to. + * @returns -1 if current < other, 1 if current > other, 0 if equal + */ + compareTo(other) { + if (other instanceof QualifiedResourcePath) { + if (this.projectId < other.projectId) { + return -1; + } + if (this.projectId > other.projectId) { + return 1; + } + if (this.databaseId < other.databaseId) { + return -1; + } + if (this.databaseId > other.databaseId) { + return 1; + } + } + return super.compareTo(other); + } + /** + * Converts this ResourcePath to the Firestore Proto representation. + * @private + * @internal + */ + toProto() { + return { + referenceValue: this.formattedName, + }; + } +} +exports.QualifiedResourcePath = QualifiedResourcePath; +/** + * Validates that the given string can be used as a relative or absolute + * resource path. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param resourcePath The path to validate. + * @throws if the string can't be used as a resource path. + */ +function validateResourcePath(arg, resourcePath) { + if (typeof resourcePath !== 'string' || resourcePath === '') { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'resource path')} Path must be a non-empty string.`); + } + if (resourcePath.indexOf('//') >= 0) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'resource path')} Paths must not contain //.`); + } +} +/** + * A dot-separated path for navigating sub-objects (e.g. nested maps) within a document. + * + * @class + */ +class FieldPath extends Path { + /** + * Constructs a Firestore Field Path. + * + * @param {...string} segments Sequence of field names that form this path. 
+ * + * @example + * ``` + * let query = firestore.collection('col'); + * let fieldPath = new FieldPath('f.o.o', 'bar'); + * + * query.where(fieldPath, '==', 42).get().then(snapshot => { + * snapshot.forEach(document => { + * console.log(`Document contains {'f.o.o' : {'bar' : 42}}`); + * }); + * }); + * ``` + */ + constructor(...segments) { + if (Array.isArray(segments[0])) { + throw new Error('The FieldPath constructor no longer supports an array as its first argument. ' + + 'Please unpack your array and call FieldPath() with individual arguments.'); + } + (0, validate_1.validateMinNumberOfArguments)('FieldPath', segments, 1); + for (let i = 0; i < segments.length; ++i) { + (0, validate_1.validateString)(i, segments[i]); + if (segments[i].length === 0) { + throw new Error(`Element at index ${i} should not be an empty string.`); + } + } + super(segments); + } + /** + * A special FieldPath value to refer to the ID of a document. It can be used + * in queries to sort or filter by the document ID. + * + * @returns {FieldPath} + */ + static documentId() { + return FieldPath._DOCUMENT_ID; + } + /** + * Turns a field path argument into a [FieldPath]{@link FieldPath}. + * Supports FieldPaths as input (which are passed through) and dot-separated + * strings. + * + * @private + * @internal + * @param {string|FieldPath} fieldPath The FieldPath to create. + * @returns {FieldPath} A field path representation. + */ + static fromArgument(fieldPath) { + // validateFieldPath() is used in all public API entry points to validate + // that fromArgument() is only called with a Field Path or a string. + return fieldPath instanceof FieldPath + ? fieldPath + : new FieldPath(...fieldPath.split('.')); + } + /** + * String representation of a FieldPath as expected by the API. + * + * @private + * @internal + * @override + * @returns {string} The representation as expected by the API. + */ + get formattedName() { + return this.segments + .map(str => { + return UNESCAPED_FIELD_NAME_RE.test(str) + ? str + : '`' + str.replace('\\', '\\\\').replace('`', '\\`') + '`'; + }) + .join('.'); + } + /** + * Returns a string representation of this path. + * + * @private + * @internal + * @returns A string representing this path. + */ + toString() { + return this.formattedName; + } + /** + * Splits a string into path segments, using dots as separators. + * + * @private + * @internal + * @override + * @param {string} fieldPath The path to split. + * @returns {Array.} - The split path segments. + */ + split(fieldPath) { + return fieldPath.split('.'); + } + /** + * Constructs a new instance of FieldPath. We need this instead of using + * the normal constructor because polymorphic 'this' doesn't work on static + * methods. + * + * @private + * @internal + * @override + * @param segments Sequence of field names. + * @returns The newly created FieldPath. + */ + construct(segments) { + return new FieldPath(...segments); + } + /** + * Returns true if this `FieldPath` is equal to the provided value. + * + * @param {*} other The value to compare against. + * @return {boolean} true if this `FieldPath` is equal to the provided value. + */ + isEqual(other) { + return super.isEqual(other); + } +} +exports.FieldPath = FieldPath; +/** + * A special sentinel value to refer to the ID of a document. + * + * @private + * @internal + */ +FieldPath._DOCUMENT_ID = new FieldPath('__name__'); +/** + * Validates that the provided value can be used as a field path argument. 
+ * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param fieldPath The value to verify. + * @throws if the string can't be used as a field path. + */ +function validateFieldPath(arg, fieldPath) { + if (fieldPath instanceof FieldPath) { + return; + } + if (fieldPath === undefined) { + throw new Error((0, validate_1.invalidArgumentMessage)(arg, 'field path') + ' The path cannot be omitted.'); + } + if ((0, util_1.isObject)(fieldPath) && fieldPath.constructor.name === 'FieldPath') { + throw new Error((0, validate_1.customObjectMessage)(arg, fieldPath)); + } + if (typeof fieldPath !== 'string') { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'field path')} Paths can only be specified as strings or via a FieldPath object.`); + } + if (fieldPath.indexOf('..') >= 0) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'field path')} Paths must not contain ".." in them.`); + } + if (fieldPath.startsWith('.') || fieldPath.endsWith('.')) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'field path')} Paths must not start or end with ".".`); + } + if (!FIELD_PATH_RE.test(fieldPath)) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'field path')} Paths can't be empty and must not contain + "*~/[]".`); + } +} +//# sourceMappingURL=path.js.map + +/***/ }), + +/***/ 74197: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2018 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ClientPool = exports.CLIENT_TERMINATED_ERROR_MSG = void 0; +const assert = __nccwpck_require__(39491); +const logger_1 = __nccwpck_require__(42718); +const util_1 = __nccwpck_require__(15468); +exports.CLIENT_TERMINATED_ERROR_MSG = 'The client has already been terminated'; +/** + * An auto-resizing pool that distributes concurrent operations over multiple + * clients of type `T`. + * + * ClientPool is used within Firestore to manage a pool of GAPIC clients and + * automatically initializes multiple clients if we issue more than 100 + * concurrent operations. + * + * @private + * @internal + */ +class ClientPool { + /** + * @param concurrentOperationLimit The number of operations that each client + * can handle. + * @param maxIdleClients The maximum number of idle clients to keep before + * garbage collecting. + * @param clientFactory A factory function called as needed when new clients + * are required. + * @param clientDestructor A cleanup function that is called when a client is + * disposed of. 
+ */ + constructor(concurrentOperationLimit, maxIdleClients, clientFactory, clientDestructor = () => Promise.resolve()) { + this.concurrentOperationLimit = concurrentOperationLimit; + this.maxIdleClients = maxIdleClients; + this.clientFactory = clientFactory; + this.clientDestructor = clientDestructor; + this.grpcEnabled = false; + /** + * Stores each active clients and how many operations it has outstanding. + */ + this.activeClients = new Map(); + /** + * A set of clients that have seen RST_STREAM errors (see + * https://github.com/googleapis/nodejs-firestore/issues/1023) and should + * no longer be used. + */ + this.failedClients = new Set(); + /** + * Whether the Firestore instance has been terminated. Once terminated, the + * ClientPool can longer schedule new operations. + */ + this.terminated = false; + /** + * Deferred promise that is resolved when there are no active operations on + * the client pool after terminate() has been called. + */ + this.terminateDeferred = new util_1.Deferred(); + } + /** + * Returns an already existing client if it has less than the maximum number + * of concurrent operations or initializes and returns a new client. + * + * @private + * @internal + */ + acquire(requestTag, requiresGrpc) { + let selectedClient = null; + let selectedClientRequestCount = -1; + // Transition to grpc when we see the first operation that requires grpc. + this.grpcEnabled = this.grpcEnabled || requiresGrpc; + // Require a grpc client for this operation if we have transitioned to grpc. + requiresGrpc = requiresGrpc || this.grpcEnabled; + for (const [client, metadata] of this.activeClients) { + // Use the "most-full" client that can still accommodate the request + // in order to maximize the number of idle clients as operations start to + // complete. + if (!this.failedClients.has(client) && + metadata.activeRequestCount > selectedClientRequestCount && + metadata.activeRequestCount < this.concurrentOperationLimit && + (metadata.grpcEnabled || !requiresGrpc)) { + selectedClient = client; + selectedClientRequestCount = metadata.activeRequestCount; + } + } + if (selectedClient) { + (0, logger_1.logger)('ClientPool.acquire', requestTag, 'Re-using existing client with %s remaining operations', this.concurrentOperationLimit - selectedClientRequestCount); + } + else { + (0, logger_1.logger)('ClientPool.acquire', requestTag, 'Creating a new client (requiresGrpc: %s)', requiresGrpc); + selectedClient = this.clientFactory(requiresGrpc); + selectedClientRequestCount = 0; + assert(!this.activeClients.has(selectedClient), 'The provided client factory returned an existing instance'); + } + this.activeClients.set(selectedClient, { + grpcEnabled: requiresGrpc, + activeRequestCount: selectedClientRequestCount + 1, + }); + return selectedClient; + } + /** + * Reduces the number of operations for the provided client, potentially + * removing it from the pool of active clients. 
+ * @private + * @internal + */ + async release(requestTag, client) { + const metadata = this.activeClients.get(client); + assert(metadata && metadata.activeRequestCount > 0, 'No active requests'); + this.activeClients.set(client, { + grpcEnabled: metadata.grpcEnabled, + activeRequestCount: metadata.activeRequestCount - 1, + }); + if (this.terminated && this.opCount === 0) { + this.terminateDeferred.resolve(); + } + if (this.shouldGarbageCollectClient(client)) { + this.activeClients.delete(client); + this.failedClients.delete(client); + await this.clientDestructor(client); + (0, logger_1.logger)('ClientPool.release', requestTag, 'Garbage collected 1 client'); + } + } + /** + * Given the current operation counts, determines if the given client should + * be garbage collected. + * @private + * @internal + */ + shouldGarbageCollectClient(client) { + const clientMetadata = this.activeClients.get(client); + if (clientMetadata.activeRequestCount !== 0) { + // Don't garbage collect clients that have active requests. + return false; + } + if (this.grpcEnabled !== clientMetadata.grpcEnabled) { + // We are transitioning to GRPC. Garbage collect REST clients. + return true; + } + // Idle clients that have received RST_STREAM errors are always garbage + // collected. + if (this.failedClients.has(client)) { + return true; + } + // Otherwise, only garbage collect if we have too much idle capacity (e.g. + // more than 100 idle capacity with default settings). + let idleCapacityCount = 0; + for (const [, metadata] of this.activeClients) { + idleCapacityCount += + this.concurrentOperationLimit - metadata.activeRequestCount; + } + return (idleCapacityCount > this.maxIdleClients * this.concurrentOperationLimit); + } + /** + * The number of currently registered clients. + * + * @return Number of currently registered clients. + * @private + * @internal + */ + // Visible for testing. + get size() { + return this.activeClients.size; + } + /** + * The number of currently active operations. + * + * @return Number of currently active operations. + * @private + * @internal + */ + // Visible for testing. + get opCount() { + let activeOperationCount = 0; + this.activeClients.forEach(metadata => (activeOperationCount += metadata.activeRequestCount)); + return activeOperationCount; + } + /** + * The currently active clients. + * + * @return The currently active clients. + * @private + * @internal + */ + // Visible for testing. + get _activeClients() { + return this.activeClients; + } + /** + * Runs the provided operation in this pool. This function may create an + * additional client if all existing clients already operate at the concurrent + * operation limit. + * + * @param requestTag A unique client-assigned identifier for this operation. + * @param op A callback function that returns a Promise. The client T will + * be returned to the pool when callback finishes. + * @return A Promise that resolves with the result of `op`. + * @private + * @internal + */ + run(requestTag, requiresGrpc, op) { + if (this.terminated) { + return Promise.reject(new Error(exports.CLIENT_TERMINATED_ERROR_MSG)); + } + const client = this.acquire(requestTag, requiresGrpc); + return op(client) + .catch(async (err) => { + var _a; + if ((_a = err.message) === null || _a === void 0 ? void 0 : _a.match(/RST_STREAM/)) { + // Once a client has seen a RST_STREAM error, the GRPC channel can + // no longer be used. We mark the client as failed, which ensures that + // we open a new GRPC channel for the next request. 
+ this.failedClients.add(client); + } + await this.release(requestTag, client); + return Promise.reject(err); + }) + .then(async (res) => { + await this.release(requestTag, client); + return res; + }); + } + async terminate() { + this.terminated = true; + // Wait for all pending operations to complete before terminating. + if (this.opCount > 0) { + (0, logger_1.logger)('ClientPool.terminate', + /* requestTag= */ null, 'Waiting for %s pending operations to complete before terminating', this.opCount); + await this.terminateDeferred.promise; + } + for (const [client] of this.activeClients) { + this.activeClients.delete(client); + await this.clientDestructor(client); + } + } +} +exports.ClientPool = ClientPool; +//# sourceMappingURL=pool.js.map + +/***/ }), + +/***/ 88357: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.QueryPartition = void 0; +const field_order_1 = __nccwpck_require__(59339); +const query_1 = __nccwpck_require__(38621); +const query_options_1 = __nccwpck_require__(47188); +const path_1 = __nccwpck_require__(34908); +const serializer_1 = __nccwpck_require__(49170); +/** + * A split point that can be used in a query as a starting and/or end point for + * the query results. The cursors returned by {@link #startAt} and {@link + * #endBefore} can only be used in a query that matches the constraint of query + * that produced this partition. + * + * @class QueryPartition + */ +class QueryPartition { + /** @private */ + constructor(_firestore, _collectionId, _converter, _startAt, _endBefore) { + this._firestore = _firestore; + this._collectionId = _collectionId; + this._converter = _converter; + this._startAt = _startAt; + this._endBefore = _endBefore; + this._serializer = new serializer_1.Serializer(_firestore); + } + /** + * The cursor that defines the first result for this partition or `undefined` + * if this is the first partition. The cursor value must be + * destructured when passed to `startAt()` (for example with + * `query.startAt(...queryPartition.startAt)`). + * + * @example + * ``` + * const query = firestore.collectionGroup('collectionId'); + * for await (const partition of query.getPartitions(42)) { + * let partitionedQuery = query.orderBy(FieldPath.documentId()); + * if (partition.startAt) { + * partitionedQuery = partitionedQuery.startAt(...partition.startAt); + * } + * if (partition.endBefore) { + * partitionedQuery = partitionedQuery.endBefore(...partition.endBefore); + * } + * const querySnapshot = await partitionedQuery.get(); + * console.log(`Partition contained ${querySnapshot.length} documents`); + * } + * + * ``` + * @type {Array<*>} + * @return {Array<*>} A cursor value that can be used with {@link + * Query#startAt} or `undefined` if this is the first partition. 
+ */ + get startAt() { + if (this._startAt && !this._memoizedStartAt) { + this._memoizedStartAt = this._startAt.map(v => this._serializer.decodeValue(v)); + } + return this._memoizedStartAt; + } + /** + * The cursor that defines the first result after this partition or + * `undefined` if this is the last partition. The cursor value must be + * destructured when passed to `endBefore()` (for example with + * `query.endBefore(...queryPartition.endBefore)`). + * + * @example + * ``` + * const query = firestore.collectionGroup('collectionId'); + * for await (const partition of query.getPartitions(42)) { + * let partitionedQuery = query.orderBy(FieldPath.documentId()); + * if (partition.startAt) { + * partitionedQuery = partitionedQuery.startAt(...partition.startAt); + * } + * if (partition.endBefore) { + * partitionedQuery = partitionedQuery.endBefore(...partition.endBefore); + * } + * const querySnapshot = await partitionedQuery.get(); + * console.log(`Partition contained ${querySnapshot.length} documents`); + * } + * + * ``` + * @type {Array<*>} + * @return {Array<*>} A cursor value that can be used with {@link + * Query#endBefore} or `undefined` if this is the last partition. + */ + get endBefore() { + if (this._endBefore && !this._memoizedEndBefore) { + this._memoizedEndBefore = this._endBefore.map(v => this._serializer.decodeValue(v)); + } + return this._memoizedEndBefore; + } + /** + * Returns a query that only encapsulates the documents for this partition. + * + * @example + * ``` + * const query = firestore.collectionGroup('collectionId'); + * for await (const partition of query.getPartitions(42)) { + * const partitionedQuery = partition.toQuery(); + * const querySnapshot = await partitionedQuery.get(); + * console.log(`Partition contained ${querySnapshot.length} documents`); + * } + * + * ``` + * @return {Query} A query partitioned by a {@link Query#startAt} and + * {@link Query#endBefore} cursor. + */ + toQuery() { + // Since the api.Value to JavaScript type conversion can be lossy (unless + // `useBigInt` is used), we pass the original protobuf representation to the + // created query. + let queryOptions = query_options_1.QueryOptions.forCollectionGroupQuery(this._collectionId, this._converter); + queryOptions = queryOptions.with({ + fieldOrders: [new field_order_1.FieldOrder(path_1.FieldPath.documentId())], + }); + if (this._startAt !== undefined) { + queryOptions = queryOptions.with({ + startAt: { before: true, values: this._startAt }, + }); + } + if (this._endBefore !== undefined) { + queryOptions = queryOptions.with({ + endAt: { before: true, values: this._endBefore }, + }); + } + return new query_1.Query(this._firestore, queryOptions); + } +} +exports.QueryPartition = QueryPartition; +//# sourceMappingURL=query-partition.js.map + +/***/ }), + +/***/ 15453: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/*! + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExplainResults = exports.ExplainMetrics = exports.ExecutionStats = exports.PlanSummary = void 0; +/** + * PlanSummary contains information about the planning stage of a query. + * + * @class PlanSummary + */ +class PlanSummary { + /** + * @private + * @internal + */ + constructor(indexesUsed) { + this.indexesUsed = indexesUsed; + } + /** + * @private + * @internal + */ + static _fromProto(plan, serializer) { + const indexes = []; + if (plan && plan.indexesUsed) { + for (const index of plan.indexesUsed) { + indexes.push(serializer.decodeGoogleProtobufStruct(index)); + } + } + return new PlanSummary(indexes); + } +} +exports.PlanSummary = PlanSummary; +/** + * ExecutionStats contains information about the execution of a query. + * + * @class ExecutionStats + */ +class ExecutionStats { + /** + * @private + * @internal + */ + constructor(resultsReturned, executionDuration, readOperations, debugStats) { + this.resultsReturned = resultsReturned; + this.executionDuration = executionDuration; + this.readOperations = readOperations; + this.debugStats = debugStats; + } + /** + * @private + * @internal + */ + static _fromProto(stats, serializer) { + var _a, _b; + if (stats) { + return new ExecutionStats(Number(stats.resultsReturned), { + seconds: Number((_a = stats.executionDuration) === null || _a === void 0 ? void 0 : _a.seconds), + nanoseconds: Number((_b = stats.executionDuration) === null || _b === void 0 ? void 0 : _b.nanos), + }, Number(stats.readOperations), serializer.decodeGoogleProtobufStruct(stats.debugStats)); + } + return null; + } +} +exports.ExecutionStats = ExecutionStats; +/** + * ExplainMetrics contains information about planning and execution of a query. + * + * @class ExplainMetrics + */ +class ExplainMetrics { + /** + * @private + * @internal + */ + constructor(planSummary, executionStats) { + this.planSummary = planSummary; + this.executionStats = executionStats; + } + /** + * @private + * @internal + */ + static _fromProto(metrics, serializer) { + return new ExplainMetrics(PlanSummary._fromProto(metrics.planSummary, serializer), ExecutionStats._fromProto(metrics.executionStats, serializer)); + } +} +exports.ExplainMetrics = ExplainMetrics; +/** + * ExplainResults contains information about planning, execution, and results + * of a query. + * + * @class ExplainResults + */ +class ExplainResults { + /** + * @private + * @internal + */ + constructor(metrics, snapshot) { + this.metrics = metrics; + this.snapshot = snapshot; + } +} +exports.ExplainResults = ExplainResults; +//# sourceMappingURL=query-profile.js.map + +/***/ }), + +/***/ 57934: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RateLimiter = void 0; +/*! + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +const assert = __nccwpck_require__(39491); +const logger_1 = __nccwpck_require__(42718); +/** + * A helper that uses the Token Bucket algorithm to rate limit the number of + * operations that can be made in a second. + * + * Before a given request containing a number of operations can proceed, + * RateLimiter determines doing so stays under the provided rate limits. It can + * also determine how much time is required before a request can be made. + * + * RateLimiter can also implement a gradually increasing rate limit. This is + * used to enforce the 500/50/5 rule + * (https://firebase.google.com/docs/firestore/best-practices#ramping_up_traffic). + * + * @private + * @internal + */ +class RateLimiter { + /** + * @param initialCapacity Initial maximum number of operations per second. + * @param multiplier Rate by which to increase the capacity. + * @param multiplierMillis How often the capacity should increase in + * milliseconds. + * @param maximumCapacity Maximum number of allowed operations per second. + * The number of tokens added per second will never exceed this number. + * @param startTimeMillis The starting time in epoch milliseconds that the + * rate limit is based on. Used for testing the limiter. + */ + constructor(initialCapacity, multiplier, multiplierMillis, maximumCapacity, startTimeMillis = Date.now()) { + this.initialCapacity = initialCapacity; + this.multiplier = multiplier; + this.multiplierMillis = multiplierMillis; + this.maximumCapacity = maximumCapacity; + this.startTimeMillis = startTimeMillis; + this.availableTokens = initialCapacity; + this.lastRefillTimeMillis = startTimeMillis; + this.previousCapacity = initialCapacity; + } + /** + * Tries to make the number of operations. Returns true if the request + * succeeded and false otherwise. + * + * @param requestTimeMillis The time used to calculate the number of available + * tokens. Used for testing the limiter. + * @private + * @internal + */ + tryMakeRequest(numOperations, requestTimeMillis = Date.now()) { + this.refillTokens(requestTimeMillis); + if (numOperations <= this.availableTokens) { + this.availableTokens -= numOperations; + return true; + } + return false; + } + /** + * Returns the number of ms needed to make a request with the provided number + * of operations. Returns 0 if the request can be made with the existing + * capacity. Returns -1 if the request is not possible with the current + * capacity. + * + * @param requestTimeMillis The time used to calculate the number of available + * tokens. Used for testing the limiter. + * @private + * @internal + */ + getNextRequestDelayMs(numOperations, requestTimeMillis = Date.now()) { + this.refillTokens(requestTimeMillis); + if (numOperations < this.availableTokens) { + return 0; + } + const capacity = this.calculateCapacity(requestTimeMillis); + if (capacity < numOperations) { + return -1; + } + const requiredTokens = numOperations - this.availableTokens; + return Math.ceil((requiredTokens * 1000) / capacity); + } + /** + * Refills the number of available tokens based on how much time has elapsed + * since the last time the tokens were refilled. + * + * @param requestTimeMillis The time used to calculate the number of available + * tokens. Used for testing the limiter. 
+ * @private + * @internal + */ + refillTokens(requestTimeMillis) { + if (requestTimeMillis >= this.lastRefillTimeMillis) { + const elapsedTime = requestTimeMillis - this.lastRefillTimeMillis; + const capacity = this.calculateCapacity(requestTimeMillis); + const tokensToAdd = Math.floor((elapsedTime * capacity) / 1000); + if (tokensToAdd > 0) { + this.availableTokens = Math.min(capacity, this.availableTokens + tokensToAdd); + this.lastRefillTimeMillis = requestTimeMillis; + } + } + else { + throw new Error('Request time should not be before the last token refill time.'); + } + } + /** + * Calculates the maximum capacity based on the provided date. + * + * @private + * @internal + */ + // Visible for testing. + calculateCapacity(requestTimeMillis) { + assert(requestTimeMillis >= this.startTimeMillis, 'startTime cannot be after currentTime'); + const millisElapsed = requestTimeMillis - this.startTimeMillis; + const operationsPerSecond = Math.min(Math.floor(Math.pow(this.multiplier, Math.floor(millisElapsed / this.multiplierMillis)) * this.initialCapacity), this.maximumCapacity); + if (operationsPerSecond !== this.previousCapacity) { + (0, logger_1.logger)('RateLimiter.calculateCapacity', null, `New request capacity: ${operationsPerSecond} operations per second.`); + } + this.previousCapacity = operationsPerSecond; + return operationsPerSecond; + } +} +exports.RateLimiter = RateLimiter; +//# sourceMappingURL=rate-limiter.js.map + +/***/ }), + +/***/ 3023: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RecursiveDelete = exports.RECURSIVE_DELETE_MIN_PENDING_OPS = exports.RECURSIVE_DELETE_MAX_PENDING_OPS = exports.REFERENCE_NAME_MIN_ID = void 0; +const assert = __nccwpck_require__(39491); +const _1 = __nccwpck_require__(32210); +const util_1 = __nccwpck_require__(15468); +const query_options_1 = __nccwpck_require__(47188); +/*! + * Datastore allowed numeric IDs where Firestore only allows strings. Numeric + * IDs are exposed to Firestore as __idNUM__, so this is the lowest possible + * negative numeric value expressed in that format. + * + * This constant is used to specify startAt/endAt values when querying for all + * descendants in a single collection. + */ +exports.REFERENCE_NAME_MIN_ID = '__id-9223372036854775808__'; +/*! + * The query limit used for recursive deletes when fetching all descendants of + * the specified reference to delete. This is done to prevent the query stream + * from streaming documents faster than Firestore can delete. + */ +// Visible for testing. +exports.RECURSIVE_DELETE_MAX_PENDING_OPS = 5000; +/*! + * The number of pending BulkWriter operations at which RecursiveDelete + * starts the next limit query to fetch descendants. By starting the query + * while there are pending operations, Firestore can improve BulkWriter + * throughput. This helps prevent BulkWriter from idling while Firestore + * fetches the next query. + */ +exports.RECURSIVE_DELETE_MIN_PENDING_OPS = 1000; +/** + * Class used to store state required for running a recursive delete operation. + * Each recursive delete call should use a new instance of the class. + * @private + * @internal + */ +class RecursiveDelete { + /** + * + * @param firestore The Firestore instance to use. + * @param writer The BulkWriter instance to use for delete operations. + * @param ref The document or collection reference to recursively delete. 
+ * @param maxLimit The query limit to use when fetching descendants + * @param minLimit The number of pending BulkWriter operations at which + * RecursiveDelete starts the next limit query to fetch descendants. + */ + constructor(firestore, writer, ref, maxLimit, minLimit) { + this.firestore = firestore; + this.writer = writer; + this.ref = ref; + this.maxLimit = maxLimit; + this.minLimit = minLimit; + /** + * The number of deletes that failed with a permanent error. + * @private + * @internal + */ + this.errorCount = 0; + /** + * Whether there are still documents to delete that still need to be fetched. + * @private + * @internal + */ + this.documentsPending = true; + /** + * Whether run() has been called. + * @private + * @internal + */ + this.started = false; + /** + * A deferred promise that resolves when the recursive delete operation + * is completed. + * @private + * @internal + */ + this.completionDeferred = new util_1.Deferred(); + /** + * Whether a query stream is currently in progress. Only one stream can be + * run at a time. + * @private + * @internal + */ + this.streamInProgress = false; + /** + * The number of pending BulkWriter operations. Used to determine when the + * next query can be run. + * @private + * @internal + */ + this.pendingOpsCount = 0; + this.errorStack = ''; + this.maxPendingOps = maxLimit; + this.minPendingOps = minLimit; + } + /** + * Recursively deletes the reference provided in the class constructor. + * Returns a promise that resolves when all descendants have been deleted, or + * if an error occurs. + */ + run() { + assert(!this.started, 'RecursiveDelete.run() should only be called once.'); + // Capture the error stack to preserve stack tracing across async calls. + this.errorStack = Error().stack; + this.writer._verifyNotClosed(); + this.setupStream(); + return this.completionDeferred.promise; + } + /** + * Creates a query stream and attaches event handlers to it. + * @private + * @internal + */ + setupStream() { + const stream = this.getAllDescendants(this.ref instanceof _1.CollectionReference + ? this.ref + : this.ref); + this.streamInProgress = true; + let streamedDocsCount = 0; + stream + .on('error', err => { + err.code = 14 /* StatusCode.UNAVAILABLE */; + err.stack = 'Failed to fetch children documents: ' + err.stack; + this.lastError = err; + this.onQueryEnd(); + }) + .on('data', (snap) => { + streamedDocsCount++; + this.lastDocumentSnap = snap; + this.deleteRef(snap.ref); + }) + .on('end', () => { + this.streamInProgress = false; + // If there are fewer than the number of documents specified in the + // limit() field, we know that the query is complete. + if (streamedDocsCount < this.minPendingOps) { + this.onQueryEnd(); + } + else if (this.pendingOpsCount === 0) { + this.setupStream(); + } + }); + } + /** + * Retrieves all descendant documents nested under the provided reference. + * @param ref The reference to fetch all descendants for. + * @private + * @internal + * @return {Stream} Stream of descendant documents. + */ + getAllDescendants(ref) { + // The parent is the closest ancestor document to the location we're + // deleting. If we are deleting a document, the parent is the path of that + // document. If we are deleting a collection, the parent is the path of the + // document containing that collection (or the database root, if it is a + // root collection). 
+ let parentPath = ref._resourcePath; + if (ref instanceof _1.CollectionReference) { + parentPath = parentPath.popLast(); + } + const collectionId = ref instanceof _1.CollectionReference + ? ref.id + : ref.parent.id; + let query = new _1.Query(this.firestore, query_options_1.QueryOptions.forKindlessAllDescendants(parentPath, collectionId, + /* requireConsistency= */ false)); + // Query for names only to fetch empty snapshots. + query = query.select(_1.FieldPath.documentId()).limit(this.maxPendingOps); + if (ref instanceof _1.CollectionReference) { + // To find all descendants of a collection reference, we need to use a + // composite filter that captures all documents that start with the + // collection prefix. The MIN_KEY constant represents the minimum key in + // this collection, and a null byte + the MIN_KEY represents the minimum + // key is the next possible collection. + const nullChar = String.fromCharCode(0); + const startAt = collectionId + '/' + exports.REFERENCE_NAME_MIN_ID; + const endAt = collectionId + nullChar + '/' + exports.REFERENCE_NAME_MIN_ID; + query = query + .where(_1.FieldPath.documentId(), '>=', startAt) + .where(_1.FieldPath.documentId(), '<', endAt); + } + if (this.lastDocumentSnap) { + query = query.startAfter(this.lastDocumentSnap); + } + return query.stream(); + } + /** + * Called when all descendants of the provided reference have been streamed + * or if a permanent error occurs during the stream. Deletes the developer + * provided reference and wraps any errors that occurred. + * @private + * @internal + */ + onQueryEnd() { + this.documentsPending = false; + if (this.ref instanceof _1.DocumentReference) { + this.writer.delete(this.ref).catch(err => this.incrementErrorCount(err)); + } + this.writer.flush().then(async () => { + var _a; + if (this.lastError === undefined) { + this.completionDeferred.resolve(); + } + else { + let error = new ((__nccwpck_require__(90418).GoogleError))(`${this.errorCount} ` + + `${this.errorCount !== 1 ? 'deletes' : 'delete'} ` + + 'failed. The last delete failed with: '); + if (this.lastError.code !== undefined) { + error.code = this.lastError.code; + } + error = (0, util_1.wrapError)(error, this.errorStack); + // Wrap the BulkWriter error last to provide the full stack trace. + this.completionDeferred.reject(this.lastError.stack + ? (0, util_1.wrapError)(error, (_a = this.lastError.stack) !== null && _a !== void 0 ? _a : '') + : error); + } + }); + } + /** + * Deletes the provided reference and starts the next stream if conditions + * are met. + * @private + * @internal + */ + deleteRef(docRef) { + this.pendingOpsCount++; + this.writer + .delete(docRef) + .catch(err => { + this.incrementErrorCount(err); + }) + .then(() => { + this.pendingOpsCount--; + // We wait until the previous stream has ended in order to sure the + // startAfter document is correct. Starting the next stream while + // there are pending operations allows Firestore to maximize + // BulkWriter throughput. + if (this.documentsPending && + !this.streamInProgress && + this.pendingOpsCount < this.minPendingOps) { + this.setupStream(); + } + }); + } + incrementErrorCount(err) { + this.errorCount++; + this.lastError = err; + } +} +exports.RecursiveDelete = RecursiveDelete; +//# sourceMappingURL=recursive-delete.js.map + +/***/ }), + +/***/ 24319: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AggregateQuerySnapshot = void 0; +const deepEqual = __nccwpck_require__(28206); +/** + * The results of executing an aggregation query. + */ +class AggregateQuerySnapshot { + /** + * @internal + * + * @param _query The query that was executed to produce this result. + * @param _readTime The time this snapshot was read. + * @param _data The results of the aggregations performed over the underlying + * query. + */ + constructor(_query, _readTime, _data) { + this._query = _query; + this._readTime = _readTime; + this._data = _data; + } + /** The query that was executed to produce this result. */ + get query() { + return this._query; + } + /** The time this snapshot was read. */ + get readTime() { + return this._readTime; + } + /** + * Returns the results of the aggregations performed over the underlying + * query. + * + * The keys of the returned object will be the same as those of the + * `AggregateSpec` object specified to the aggregation method, and the + * values will be the corresponding aggregation result. + * + * @returns The results of the aggregations performed over the underlying + * query. + */ + data() { + return this._data; + } + /** + * Compares this object with the given object for equality. + * + * Two `AggregateQuerySnapshot` instances are considered "equal" if they + * have the same data and their underlying queries compare "equal" using + * `AggregateQuery.isEqual()`. + * + * @param other The object to compare to this object for equality. + * @return `true` if this object is "equal" to the given object, as + * defined above, or `false` otherwise. + */ + isEqual(other) { + if (this === other) { + return true; + } + if (!(other instanceof AggregateQuerySnapshot)) { + return false; + } + // Since the read time is different on every read, we explicitly ignore all + // document metadata in this comparison, just like + // `DocumentSnapshot.isEqual()` does. + if (!this.query.isEqual(other.query)) { + return false; + } + return deepEqual(this._data, other._data); + } +} +exports.AggregateQuerySnapshot = AggregateQuerySnapshot; +//# sourceMappingURL=aggregate-query-snapshot.js.map + +/***/ }), + +/***/ 8763: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AggregateQuery = void 0; +const assert = __nccwpck_require__(39491); +const deepEqual = __nccwpck_require__(28206); +const aggregate_1 = __nccwpck_require__(97114); +const timestamp_1 = __nccwpck_require__(29061); +const util_1 = __nccwpck_require__(15468); +const query_profile_1 = __nccwpck_require__(15453); +const logger_1 = __nccwpck_require__(42718); +const aggregate_query_snapshot_1 = __nccwpck_require__(24319); +const stream_1 = __nccwpck_require__(12781); +const trace_util_1 = __nccwpck_require__(2693); +/** + * A query that calculates aggregations over an underlying query. + */ +class AggregateQuery { + /** + * @internal + * @param _query The query whose aggregations will be calculated by this + * object. + * @param _aggregates The aggregations that will be performed by this query. + */ + constructor( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _query, _aggregates) { + this._query = _query; + this._aggregates = _aggregates; + this.clientAliasToServerAliasMap = {}; + this.serverAliasToClientAliasMap = {}; + // Client-side aliases may be too long and exceed the 1500-byte string size limit. + // Such long strings do not need to be transferred over the wire either. + // The client maps the user's alias to a short form alias and send that to the server. + let aggregationNum = 0; + for (const clientAlias in this._aggregates) { + if (Object.prototype.hasOwnProperty.call(this._aggregates, clientAlias)) { + const serverAlias = `aggregate_${aggregationNum++}`; + this.clientAliasToServerAliasMap[clientAlias] = serverAlias; + this.serverAliasToClientAliasMap[serverAlias] = clientAlias; + } + } + } + /** The query whose aggregations will be calculated by this object. */ + get query() { + return this._query; + } + /** + * Executes this query. + * + * @return A promise that will be resolved with the results of the query. + */ + async get() { + return this._query._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_AGGREGATION_QUERY_GET, async () => { + const { result } = await this._get(); + return result; + }); + } + /** + * Internal get() method that accepts an optional transaction options and + * returns a snapshot with transaction and explain metadata. + * + * @private + * @internal + * @param transactionOrReadTime A transaction ID, options to start a new + * transaction, or timestamp to use as read time. + */ + async _get(transactionOrReadTime) { + const response = await this._getResponse(transactionOrReadTime); + if (!response.result) { + throw new Error('No AggregateQuery results'); + } + return response; + } + /** + * Internal get() method that accepts an optional transaction id, and returns + * transaction metadata. + * + * @private + * @internal + * @param transactionOrReadTime A transaction ID, options to start a new + * transaction, or timestamp to use as read time. + */ + _getResponse(transactionOrReadTime, explainOptions) { + // Capture the error stack to preserve stack tracing across async calls. 
+ const stack = Error().stack; + return new Promise((resolve, reject) => { + const output = {}; + const stream = this._stream(transactionOrReadTime, explainOptions); + stream.on('error', err => { + reject((0, util_1.wrapError)(err, stack)); + }); + stream.on('data', (data) => { + if (data.transaction) { + output.transaction = data.transaction; + } + if (data.explainMetrics) { + output.explainMetrics = data.explainMetrics; + } + if (data.result) { + output.result = data.result; + } + }); + stream.on('end', () => { + stream.destroy(); + resolve(output); + }); + }); + } + /** + * Internal streaming method that accepts an optional transaction ID. + * + * BEWARE: If `transactionOrReadTime` is `ITransactionOptions`, then the first + * response in the stream will be a transaction response. + * + * @private + * @internal + * @param transactionOrReadTime A transaction ID, options to start a new + * transaction, or timestamp to use as read time. + * @param explainOptions Options to use for explaining the query (if any). + * @returns A stream of document results optionally preceded by a transaction response. + */ + _stream(transactionOrReadTime, explainOptions) { + const tag = (0, util_1.requestTag)(); + const firestore = this._query.firestore; + const stream = new stream_1.Transform({ + objectMode: true, + transform: (proto, enc, callback) => { + var _a; + const output = {}; + // Proto comes with zero-length buffer by default + if ((_a = proto.transaction) === null || _a === void 0 ? void 0 : _a.length) { + output.transaction = proto.transaction; + } + if (proto.explainMetrics) { + output.explainMetrics = query_profile_1.ExplainMetrics._fromProto(proto.explainMetrics, firestore._serializer); + } + if (proto.result) { + const readTime = timestamp_1.Timestamp.fromProto(proto.readTime); + const data = this.decodeResult(proto.result); + output.result = new aggregate_query_snapshot_1.AggregateQuerySnapshot(this, readTime, data); + } + callback(undefined, output); + }, + }); + firestore + .initializeIfNeeded(tag) + .then(async () => { + // `toProto()` might throw an exception. We rely on the behavior of an + // async function to convert this exception into the rejected Promise we + // catch below. + const request = this.toProto(transactionOrReadTime, explainOptions); + const backendStream = await firestore.requestStream('runAggregationQuery', + /* bidirectional= */ false, request, tag); + stream.on('close', () => { + backendStream.resume(); + backendStream.end(); + }); + backendStream.on('error', err => { + // TODO(group-by) When group-by queries are supported for aggregates + // consider implementing retries if the stream is making progress + // receiving results for groups. See the use of lastReceivedDocument + // in the retry strategy for runQuery. + // Also note that explain queries should not be retried. + backendStream.unpipe(stream); + (0, logger_1.logger)('AggregateQuery._stream', tag, 'AggregateQuery failed with stream error:', err); + this._query._firestore._traceUtil + .currentSpan() + .addEvent(`${trace_util_1.SPAN_NAME_RUN_AGGREGATION_QUERY}: Error.`, { + 'error.message': err.message, + }); + stream.destroy(err); + }); + backendStream.resume(); + backendStream.pipe(stream); + }) + .catch(e => stream.destroy(e)); + return stream; + } + /** + * Internal method to decode values within result. 
+ * @private + */ + decodeResult(proto) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const data = {}; + const fields = proto.aggregateFields; + if (fields) { + const serializer = this._query.firestore._serializer; + for (const prop of Object.keys(fields)) { + const alias = this.serverAliasToClientAliasMap[prop]; + assert(alias !== null && alias !== undefined, `'${prop}' not present in server-client alias mapping.`); + if (this._aggregates[alias] === undefined) { + throw new Error(`Unexpected alias [${prop}] in result aggregate result`); + } + data[alias] = serializer.decodeValue(fields[prop]); + } + } + return data; + } + /** + * Internal method for serializing a query to its RunAggregationQuery proto + * representation with an optional transaction id. + * + * @private + * @internal + * @returns Serialized JSON for the query. + */ + toProto(transactionOrReadTime, explainOptions) { + const queryProto = this._query.toProto(); + const runQueryRequest = { + parent: queryProto.parent, + structuredAggregationQuery: { + structuredQuery: queryProto.structuredQuery, + aggregations: (0, util_1.mapToArray)(this._aggregates, (aggregate, clientAlias) => { + const serverAlias = this.clientAliasToServerAliasMap[clientAlias]; + assert(serverAlias !== null && serverAlias !== undefined, `'${clientAlias}' not present in client-server alias mapping.`); + return new aggregate_1.Aggregate(serverAlias, aggregate.aggregateType, aggregate._field).toProto(); + }), + }, + }; + if (transactionOrReadTime instanceof Uint8Array) { + runQueryRequest.transaction = transactionOrReadTime; + } + else if (transactionOrReadTime instanceof timestamp_1.Timestamp) { + runQueryRequest.readTime = transactionOrReadTime; + } + else if (transactionOrReadTime) { + runQueryRequest.newTransaction = transactionOrReadTime; + } + if (explainOptions) { + runQueryRequest.explainOptions = explainOptions; + } + return runQueryRequest; + } + /** + * Compares this object with the given object for equality. + * + * This object is considered "equal" to the other object if and only if + * `other` performs the same aggregations as this `AggregateQuery` and + * the underlying Query of `other` compares equal to that of this object + * using `Query.isEqual()`. + * + * @param other The object to compare to this object for equality. + * @return `true` if this object is "equal" to the given object, as + * defined above, or `false` otherwise. + */ + isEqual(other) { + if (this === other) { + return true; + } + if (!(other instanceof AggregateQuery)) { + return false; + } + if (!this.query.isEqual(other.query)) { + return false; + } + return deepEqual(this._aggregates, other._aggregates); + } + /** + * Plans and optionally executes this query. Returns a Promise that will be + * resolved with the planner information, statistics from the query + * execution (if any), and the query results (if any). + * + * @return A Promise that will be resolved with the planner information, + * statistics from the query execution (if any), and the query results (if any). 
+ */ + async explain(options) { + const { result, explainMetrics } = await this._getResponse(undefined, options || {}); + if (!explainMetrics) { + throw new Error('No explain results'); + } + return new query_profile_1.ExplainResults(explainMetrics, result || null); + } +} +exports.AggregateQuery = AggregateQuery; +//# sourceMappingURL=aggregate-query.js.map + +/***/ }), + +/***/ 37253: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CollectionReference = void 0; +const path_1 = __nccwpck_require__(34908); +const util_1 = __nccwpck_require__(15468); +const write_batch_1 = __nccwpck_require__(76012); +const types_1 = __nccwpck_require__(75371); +const query_1 = __nccwpck_require__(38621); +const document_reference_1 = __nccwpck_require__(502); +const query_options_1 = __nccwpck_require__(47188); +const trace_util_1 = __nccwpck_require__(2693); +/** + * A CollectionReference object can be used for adding documents, getting + * document references, and querying for documents (using the methods + * inherited from [Query]{@link Query}). + * + * @class CollectionReference + * @extends Query + */ +class CollectionReference extends query_1.Query { + /** + * @private + * + * @param firestore The Firestore Database client. + * @param path The Path of this collection. + */ + constructor(firestore, path, converter) { + super(firestore, query_options_1.QueryOptions.forCollectionQuery(path, converter)); + } + /** + * Returns a resource path for this collection. + * @private + * @internal + */ + get _resourcePath() { + return this._queryOptions.parentPath.append(this._queryOptions.collectionId); + } + /** + * The last path element of the referenced collection. + * + * @type {string} + * @name CollectionReference#id + * @readonly + * + * @example + * ``` + * let collectionRef = firestore.collection('col/doc/subcollection'); + * console.log(`ID of the subcollection: ${collectionRef.id}`); + * ``` + */ + get id() { + return this._queryOptions.collectionId; + } + /** + * A reference to the containing Document if this is a subcollection, else + * null. + * + * @type {DocumentReference|null} + * @name CollectionReference#parent + * @readonly + * + * @example + * ``` + * let collectionRef = firestore.collection('col/doc/subcollection'); + * let documentRef = collectionRef.parent; + * console.log(`Parent name: ${documentRef.path}`); + * ``` + */ + get parent() { + if (this._queryOptions.parentPath.isDocument) { + return new document_reference_1.DocumentReference(this.firestore, this._queryOptions.parentPath); + } + return null; + } + /** + * A string representing the path of the referenced collection (relative + * to the root of the database). 
+ * + * @type {string} + * @name CollectionReference#path + * @readonly + * + * @example + * ``` + * let collectionRef = firestore.collection('col/doc/subcollection'); + * console.log(`Path of the subcollection: ${collectionRef.path}`); + * ``` + */ + get path() { + return this._resourcePath.relativeName; + } + /** + * Retrieves the list of documents in this collection. + * + * The document references returned may include references to "missing + * documents", i.e. document locations that have no document present but + * which contain subcollections with documents. Attempting to read such a + * document reference (e.g. via `.get()` or `.onSnapshot()`) will return a + * `DocumentSnapshot` whose `.exists` property is false. + * + * @return {Promise} The list of documents in this + * collection. + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * + * return collectionRef.listDocuments().then(documentRefs => { + * return firestore.getAll(...documentRefs); + * }).then(documentSnapshots => { + * for (let documentSnapshot of documentSnapshots) { + * if (documentSnapshot.exists) { + * console.log(`Found document with data: ${documentSnapshot.id}`); + * } else { + * console.log(`Found missing document: ${documentSnapshot.id}`); + * } + * } + * }); + * ``` + */ + listDocuments() { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_COL_REF_LIST_DOCUMENTS, () => { + const tag = (0, util_1.requestTag)(); + return this.firestore.initializeIfNeeded(tag).then(() => { + const parentPath = this._queryOptions.parentPath.toQualifiedResourcePath(this.firestore.projectId, this.firestore.databaseId); + const request = { + parent: parentPath.formattedName, + collectionId: this.id, + showMissing: true, + // Setting `pageSize` to an arbitrarily large value lets the backend cap + // the page size (currently to 300). Note that the backend rejects + // MAX_INT32 (b/146883794). + pageSize: Math.pow(2, 16) - 1, + mask: { fieldPaths: [] }, + }; + return this.firestore + .request('listDocuments', request, tag) + .then(documents => { + // Note that the backend already orders these documents by name, + // so we do not need to manually sort them. + return documents.map(doc => { + const path = path_1.QualifiedResourcePath.fromSlashSeparatedString(doc.name); + return this.doc(path.id); + }); + }); + }); + }); + } + /** + * Gets a [DocumentReference]{@link DocumentReference} instance that + * refers to the document at the specified path. If no path is specified, an + * automatically-generated unique ID will be used for the returned + * DocumentReference. + * + * @param {string=} documentPath A slash-separated path to a document. + * @returns {DocumentReference} The `DocumentReference` + * instance. + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * let documentRefWithName = collectionRef.doc('doc'); + * let documentRefWithAutoId = collectionRef.doc(); + * console.log(`Reference with name: ${documentRefWithName.path}`); + * console.log(`Reference with auto-id: ${documentRefWithAutoId.path}`); + * ``` + */ + doc(documentPath) { + if (arguments.length === 0) { + documentPath = (0, util_1.autoId)(); + } + else { + (0, path_1.validateResourcePath)('documentPath', documentPath); + } + const path = this._resourcePath.append(documentPath); + if (!path.isDocument) { + throw new Error(`Value for argument "documentPath" must point to a document, but was "${documentPath}". 
Your path does not contain an even number of components.`); + } + return new document_reference_1.DocumentReference(this.firestore, path, this._queryOptions.converter); + } + /** + * Add a new document to this collection with the specified data, assigning + * it a document ID automatically. + * + * @param {DocumentData} data An Object containing the data for the new + * document. + * @throws {Error} If the provided input is not a valid Firestore document. + * @returns {Promise.} A Promise resolved with a + * [DocumentReference]{@link DocumentReference} pointing to the + * newly created document. + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * collectionRef.add({foo: 'bar'}).then(documentReference => { + * console.log(`Added document with name: ${documentReference.id}`); + * }); + * ``` + */ + add(data) { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_COL_REF_ADD, () => { + const firestoreData = this._queryOptions.converter.toFirestore(data); + (0, write_batch_1.validateDocumentData)('data', firestoreData, + /*allowDeletes=*/ false, this._allowUndefined); + const documentRef = this.doc(); + return documentRef.create(data).then(() => documentRef); + }); + } + /** + * Returns true if this `CollectionReference` is equal to the provided value. + * + * @param {*} other The value to compare against. + * @return {boolean} true if this `CollectionReference` is equal to the + * provided value. + */ + isEqual(other) { + return (this === other || + (other instanceof CollectionReference && super.isEqual(other))); + } + /** + * Applies a custom data converter to this CollectionReference, allowing you + * to use your own custom model objects with Firestore. When you call add() on + * the returned CollectionReference instance, the provided converter will + * convert between Firestore data of type `NewDbModelType` and your custom + * type `NewAppModelType`. + * + * Using the converter allows you to specify generic type arguments when + * storing and retrieving objects from Firestore. + * + * Passing in `null` as the converter parameter removes the current + * converter. + * + * @example + * ``` + * class Post { + * constructor(readonly title: string, readonly author: string) {} + * + * toString(): string { + * return this.title + ', by ' + this.author; + * } + * } + * + * const postConverter = { + * toFirestore(post: Post): FirebaseFirestore.DocumentData { + * return {title: post.title, author: post.author}; + * }, + * fromFirestore( + * snapshot: FirebaseFirestore.QueryDocumentSnapshot + * ): Post { + * const data = snapshot.data(); + * return new Post(data.title, data.author); + * } + * }; + * + * const postSnap = await Firestore() + * .collection('posts') + * .withConverter(postConverter) + * .doc().get(); + * const post = postSnap.data(); + * if (post !== undefined) { + * post.title; // string + * post.toString(); // Should be defined + * post.someNonExistentProperty; // TS error + * } + * + * ``` + * @param {FirestoreDataConverter | null} converter Converts objects to and + * from Firestore. Passing in `null` removes the current converter. + * @return A CollectionReference that uses the provided converter. + */ + withConverter(converter) { + return new CollectionReference(this.firestore, this._resourcePath, converter !== null && converter !== void 0 ? 
converter : (0, types_1.defaultConverter)()); + } +} +exports.CollectionReference = CollectionReference; +//# sourceMappingURL=collection-reference.js.map + +/***/ }), + +/***/ 98009: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CompositeFilterInternal = void 0; +const filter_internal_1 = __nccwpck_require__(15702); +class CompositeFilterInternal extends filter_internal_1.FilterInternal { + constructor(filters, operator) { + super(); + this.filters = filters; + this.operator = operator; + // Memoized list of all field filters that can be found by traversing the tree of filters + // contained in this composite filter. + this.memoizedFlattenedFilters = null; + } + getFilters() { + return this.filters; + } + isConjunction() { + return this.operator === 'AND'; + } + getFlattenedFilters() { + if (this.memoizedFlattenedFilters !== null) { + return this.memoizedFlattenedFilters; + } + this.memoizedFlattenedFilters = this.filters.reduce((allFilters, subfilter) => allFilters.concat(subfilter.getFlattenedFilters()), []); + return this.memoizedFlattenedFilters; + } + toProto() { + if (this.filters.length === 1) { + return this.filters[0].toProto(); + } + const proto = { + compositeFilter: { + op: this.operator, + filters: this.filters.map(filter => filter.toProto()), + }, + }; + return proto; + } + isEqual(other) { + if (other instanceof CompositeFilterInternal) { + const otherFilters = other.getFilters(); + return (this.operator === other.operator && + this.getFilters().length === other.getFilters().length && + this.getFilters().every((filter, index) => filter.isEqual(otherFilters[index]))); + } + else { + return false; + } + } +} +exports.CompositeFilterInternal = CompositeFilterInternal; +//# sourceMappingURL=composite-filter-internal.js.map + +/***/ }), + +/***/ 43176: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NOOP_MESSAGE = exports.comparisonOperators = exports.directionOperators = void 0; +/** + * The direction of a `Query.orderBy()` clause is specified as 'desc' or 'asc' + * (descending or ascending). 
+ * + * @private + * @internal + */ +exports.directionOperators = { + asc: 'ASCENDING', + desc: 'DESCENDING', +}; +/** + * Filter conditions in a `Query.where()` clause are specified using the + * strings '<', '<=', '==', '!=', '>=', '>', 'array-contains', 'in', 'not-in', + * and 'array-contains-any'. + * + * @private + * @internal + */ +exports.comparisonOperators = { + '<': 'LESS_THAN', + '<=': 'LESS_THAN_OR_EQUAL', + '==': 'EQUAL', + '!=': 'NOT_EQUAL', + '>': 'GREATER_THAN', + '>=': 'GREATER_THAN_OR_EQUAL', + 'array-contains': 'ARRAY_CONTAINS', + in: 'IN', + 'not-in': 'NOT_IN', + 'array-contains-any': 'ARRAY_CONTAINS_ANY', +}; +exports.NOOP_MESSAGE = Symbol('a noop message'); +//# sourceMappingURL=constants.js.map + +/***/ }), + +/***/ 502: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DocumentReference = void 0; +const index_1 = __nccwpck_require__(32210); +const path_1 = __nccwpck_require__(34908); +const types_1 = __nccwpck_require__(75371); +const collection_reference_1 = __nccwpck_require__(37253); +const util_1 = __nccwpck_require__(15468); +const validate_1 = __nccwpck_require__(33822); +const document_1 = __nccwpck_require__(98912); +const trace_util_1 = __nccwpck_require__(2693); +/** + * A DocumentReference refers to a document location in a Firestore database + * and can be used to write, read, or listen to the location. The document at + * the referenced location may or may not exist. A DocumentReference can + * also be used to create a + * [CollectionReference]{@link CollectionReference} to a + * subcollection. + * + * @class DocumentReference + */ +class DocumentReference { + /** + * @private + * @internal + * @param _firestore The Firestore Database client. + * @param _path The Path of this reference. + * @param _converter The converter to use when serializing data. + */ + constructor(_firestore, + /** + * @private + * @internal + **/ + _path, + /** + * @internal + * @private + **/ + _converter = (0, types_1.defaultConverter)()) { + this._firestore = _firestore; + this._path = _path; + this._converter = _converter; + } + /** + * The string representation of the DocumentReference's location. + * @private + * @internal + * @type {string} + * @name DocumentReference#formattedName + */ + get formattedName() { + const projectId = this.firestore.projectId; + const databaseId = this.firestore.databaseId; + return this._path.toQualifiedResourcePath(projectId, databaseId) + .formattedName; + } + /** + * The [Firestore]{@link Firestore} instance for the Firestore + * database (useful for performing transactions, etc.). 
+ * + * @type {Firestore} + * @name DocumentReference#firestore + * @readonly + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * + * collectionRef.add({foo: 'bar'}).then(documentReference => { + * let firestore = documentReference.firestore; + * console.log(`Root location for document is ${firestore.formattedName}`); + * }); + * ``` + */ + get firestore() { + return this._firestore; + } + /** + * A string representing the path of the referenced document (relative + * to the root of the database). + * + * @type {string} + * @name DocumentReference#path + * @readonly + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * + * collectionRef.add({foo: 'bar'}).then(documentReference => { + * console.log(`Added document at '${documentReference.path}'`); + * }); + * ``` + */ + get path() { + return this._path.relativeName; + } + /** + * The last path element of the referenced document. + * + * @type {string} + * @name DocumentReference#id + * @readonly + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * + * collectionRef.add({foo: 'bar'}).then(documentReference => { + * console.log(`Added document with name '${documentReference.id}'`); + * }); + * ``` + */ + get id() { + return this._path.id; + } + /** + * Returns a resource path for this document. + * @private + * @internal + */ + get _resourcePath() { + return this._path; + } + /** + * A reference to the collection to which this DocumentReference belongs. + * + * @name DocumentReference#parent + * @type {CollectionReference} + * @readonly + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * let collectionRef = documentRef.parent; + * + * collectionRef.where('foo', '==', 'bar').get().then(results => { + * console.log(`Found ${results.size} matches in parent collection`); + * }): + * ``` + */ + get parent() { + return new collection_reference_1.CollectionReference(this._firestore, this._path.parent(), this._converter); + } + /** + * Reads the document referred to by this DocumentReference. + * + * @returns {Promise.} A Promise resolved with a + * DocumentSnapshot for the retrieved document on success. For missing + * documents, DocumentSnapshot.exists will be false. If the get() fails for + * other reasons, the Promise will be rejected. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(documentSnapshot => { + * if (documentSnapshot.exists) { + * console.log('Document retrieved successfully.'); + * } + * }); + * ``` + */ + get() { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_DOC_REF_GET, () => { + return this._firestore.getAll(this).then(([result]) => result); + }); + } + /** + * Gets a [CollectionReference]{@link CollectionReference} instance + * that refers to the collection at the specified path. + * + * @param {string} collectionPath A slash-separated path to a collection. + * @returns {CollectionReference} A reference to the new + * subcollection. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * let subcollection = documentRef.collection('subcollection'); + * console.log(`Path to subcollection: ${subcollection.path}`); + * ``` + */ + collection(collectionPath) { + (0, path_1.validateResourcePath)('collectionPath', collectionPath); + const path = this._path.append(collectionPath); + if (!path.isCollection) { + throw new Error(`Value for argument "collectionPath" must point to a collection, but was "${collectionPath}". 
Your path does not contain an odd number of components.`); + } + return new collection_reference_1.CollectionReference(this._firestore, path); + } + /** + * Fetches the subcollections that are direct children of this document. + * + * @returns {Promise.>} A Promise that resolves + * with an array of CollectionReferences. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.listCollections().then(collections => { + * for (let collection of collections) { + * console.log(`Found subcollection with id: ${collection.id}`); + * } + * }); + * ``` + */ + listCollections() { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_DOC_REF_LIST_COLLECTIONS, () => { + const tag = (0, util_1.requestTag)(); + return this.firestore.initializeIfNeeded(tag).then(() => { + const request = { + parent: this.formattedName, + // Setting `pageSize` to an arbitrarily large value lets the backend cap + // the page size (currently to 300). Note that the backend rejects + // MAX_INT32 (b/146883794). + pageSize: Math.pow(2, 16) - 1, + }; + return this._firestore + .request('listCollectionIds', request, tag) + .then(collectionIds => { + const collections = []; + // We can just sort this list using the default comparator since it + // will only contain collection ids. + collectionIds.sort(); + for (const collectionId of collectionIds) { + collections.push(this.collection(collectionId)); + } + return collections; + }); + }); + }); + } + /** + * Create a document with the provided object values. This will fail the write + * if a document exists at its location. + * + * @param {DocumentData} data An object that contains the fields and data to + * serialize as the document. + * @throws {Error} If the provided input is not a valid Firestore document or if the document already exists. + * @returns {Promise.} A Promise that resolves with the + * write time of this create. + * + * @example + * ``` + * let documentRef = firestore.collection('col').doc(); + * + * documentRef.create({foo: 'bar'}).then((res) => { + * console.log(`Document created at ${res.updateTime}`); + * }).catch((err) => { + * console.log(`Failed to create document: ${err}`); + * }); + * ``` + */ + create(data) { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_DOC_REF_CREATE, () => { + const writeBatch = new index_1.WriteBatch(this._firestore); + return writeBatch + .create(this, data) + .commit() + .then(([writeResult]) => writeResult); + }); + } + /** + * Deletes the document referred to by this `DocumentReference`. + * + * A delete for a non-existing document is treated as a success (unless + * lastUptimeTime is provided). + * + * @param {Precondition=} precondition A precondition to enforce for this + * delete. + * @param {Timestamp=} precondition.lastUpdateTime If set, enforces that the + * document was last updated at lastUpdateTime. Fails the delete if the + * document was last updated at a different time. + * @param {boolean=} precondition.exists If set, enforces that the target + * document must or must not exist. + * @returns {Promise.} A Promise that resolves with the + * delete time. 
+ * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.delete().then(() => { + * console.log('Document successfully deleted.'); + * }); + * ``` + */ + delete(precondition) { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_DOC_REF_DELETE, () => { + const writeBatch = new index_1.WriteBatch(this._firestore); + return writeBatch + .delete(this, precondition) + .commit() + .then(([writeResult]) => writeResult); + }); + } + /** + * Writes to the document referred to by this DocumentReference. If the + * document does not yet exist, it will be created. If you pass + * [SetOptions]{@link SetOptions}, the provided data can be merged into an + * existing document. + * + * @param {T|Partial} data A map of the fields and values for + * the document. + * @param {SetOptions=} options An object to configure the set behavior. + * @param {boolean=} options.merge If true, set() merges the values specified + * in its data argument. Fields omitted from this set() call remain untouched. + * If your input sets any field to an empty map, all nested fields are + * overwritten. + * @param {Array.=} options.mergeFields If provided, + * set() only replaces the specified field paths. Any field path that is not + * specified is ignored and remains untouched. If your input sets any field to + * an empty map, all nested fields are overwritten. + * @throws {Error} If the provided input is not a valid Firestore document. + * @returns {Promise.} A Promise that resolves with the + * write time of this set. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.set({foo: 'bar'}).then(res => { + * console.log(`Document written at ${res.updateTime}`); + * }); + * ``` + */ + set(data, options) { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_DOC_REF_SET, () => { + let writeBatch = new index_1.WriteBatch(this._firestore); + if (options) { + writeBatch = writeBatch.set(this, data, options); + } + else { + writeBatch = writeBatch.set(this, data); + } + return writeBatch.commit().then(([writeResult]) => writeResult); + }); + } + /** + * Updates fields in the document referred to by this DocumentReference. + * If the document doesn't yet exist, the update fails and the returned + * Promise will be rejected. + * + * The update() method accepts either an object with field paths encoded as + * keys and field values encoded as values, or a variable number of arguments + * that alternate between field paths and field values. + * + * A Precondition restricting this update can be specified as the last + * argument. + * + * @param {UpdateData|string|FieldPath} dataOrField An object containing the + * fields and values with which to update the document or the path of the + * first field to update. + * @param { + * ...(*|string|FieldPath|Precondition)} preconditionOrValues An alternating + * list of field paths and values to update or a Precondition to restrict + * this update. + * @throws {Error} If the provided input is not valid Firestore data. + * @returns {Promise.} A Promise that resolves once the + * data has been successfully written to the backend. 
+ * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.update({foo: 'bar'}).then(res => { + * console.log(`Document updated at ${res.updateTime}`); + * }); + * ``` + */ + update(dataOrField, ...preconditionOrValues) { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_DOC_REF_UPDATE, () => { + // eslint-disable-next-line prefer-rest-params + (0, validate_1.validateMinNumberOfArguments)('DocumentReference.update', arguments, 1); + const writeBatch = new index_1.WriteBatch(this._firestore); + return writeBatch + .update(this, dataOrField, ...preconditionOrValues) + .commit() + .then(([writeResult]) => writeResult); + }); + } + /** + * Attaches a listener for DocumentSnapshot events. + * + * @param {documentSnapshotCallback} onNext A callback to be called every + * time a new `DocumentSnapshot` is available. + * @param {errorCallback=} onError A callback to be called if the listen fails + * or is cancelled. No further callbacks will occur. If unset, errors will be + * logged to the console. + * + * @returns {function()} An unsubscribe function that can be called to cancel + * the snapshot listener. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * let unsubscribe = documentRef.onSnapshot(documentSnapshot => { + * if (documentSnapshot.exists) { + * console.log(documentSnapshot.data()); + * } + * }, err => { + * console.log(`Encountered error: ${err}`); + * }); + * + * // Remove this listener. + * unsubscribe(); + * ``` + */ + onSnapshot(onNext, onError) { + (0, validate_1.validateFunction)('onNext', onNext); + (0, validate_1.validateFunction)('onError', onError, { optional: true }); + const watch = new ((__nccwpck_require__(97462)/* .DocumentWatch */ .i9))(this.firestore, this); + return watch.onSnapshot((readTime, size, docs) => { + for (const document of docs()) { + if (document.ref.path === this.path) { + onNext(document); + return; + } + } + // The document is missing. + const ref = new DocumentReference(this._firestore, this._path, this._converter); + const document = new document_1.DocumentSnapshotBuilder(ref); + document.readTime = readTime; + onNext(document.build()); + }, onError || console.error); + } + /** + * Returns true if this `DocumentReference` is equal to the provided value. + * + * @param {*} other The value to compare against. + * @return {boolean} true if this `DocumentReference` is equal to the provided + * value. + */ + isEqual(other) { + return (this === other || + (other instanceof DocumentReference && + this._firestore === other._firestore && + this._path.isEqual(other._path) && + this._converter === other._converter)); + } + /** + * Converts this DocumentReference to the Firestore Proto representation. + * + * @private + * @internal + */ + toProto() { + return { referenceValue: this.formattedName }; + } + /** + * Applies a custom data converter to this DocumentReference, allowing you to + * use your own custom model objects with Firestore. When you call set(), + * get(), etc. on the returned DocumentReference instance, the provided + * converter will convert between Firestore data of type `NewDbModelType` and + * your custom type `NewAppModelType`. + * + * Using the converter allows you to specify generic type arguments when + * storing and retrieving objects from Firestore. + * + * Passing in `null` as the converter parameter removes the current + * converter. 
+ * + * @example + * ``` + * class Post { + * constructor(readonly title: string, readonly author: string) {} + * + * toString(): string { + * return this.title + ', by ' + this.author; + * } + * } + * + * const postConverter = { + * toFirestore(post: Post): FirebaseFirestore.DocumentData { + * return {title: post.title, author: post.author}; + * }, + * fromFirestore( + * snapshot: FirebaseFirestore.QueryDocumentSnapshot + * ): Post { + * const data = snapshot.data(); + * return new Post(data.title, data.author); + * } + * }; + * + * const postSnap = await Firestore() + * .collection('posts') + * .withConverter(postConverter) + * .doc().get(); + * const post = postSnap.data(); + * if (post !== undefined) { + * post.title; // string + * post.toString(); // Should be defined + * post.someNonExistentProperty; // TS error + * } + * + * ``` + * @param {FirestoreDataConverter | null} converter Converts objects to and + * from Firestore. Passing in `null` removes the current converter. + * @return A DocumentReference that uses the provided converter. + */ + withConverter(converter) { + return new DocumentReference(this.firestore, this._path, converter !== null && converter !== void 0 ? converter : (0, types_1.defaultConverter)()); + } +} +exports.DocumentReference = DocumentReference; +//# sourceMappingURL=document-reference.js.map + +/***/ }), + +/***/ 97475: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FieldFilterInternal = void 0; +const deepEqual = __nccwpck_require__(28206); +const filter_internal_1 = __nccwpck_require__(15702); +/** + * A field constraint for a Query where clause. + * + * @private + * @internal + * @class + */ +class FieldFilterInternal extends filter_internal_1.FilterInternal { + getFlattenedFilters() { + return [this]; + } + getFilters() { + return [this]; + } + /** + * @param serializer The Firestore serializer + * @param field The path of the property value to compare. + * @param op A comparison operation. + * @param value The value to which to compare the field for inclusion in a + * query. + */ + constructor(serializer, field, op, value) { + super(); + this.serializer = serializer; + this.field = field; + this.op = op; + this.value = value; + } + /** + * Returns whether this FieldFilter uses an equals comparison. + * + * @private + * @internal + */ + isInequalityFilter() { + switch (this.op) { + case 'GREATER_THAN': + case 'GREATER_THAN_OR_EQUAL': + case 'LESS_THAN': + case 'LESS_THAN_OR_EQUAL': + case 'NOT_EQUAL': + case 'NOT_IN': + return true; + default: + return false; + } + } + /** + * Generates the proto representation for this field filter. 
+ * + * @private + * @internal + */ + toProto() { + if (typeof this.value === 'number' && isNaN(this.value)) { + return { + unaryFilter: { + field: { + fieldPath: this.field.formattedName, + }, + op: this.op === 'EQUAL' ? 'IS_NAN' : 'IS_NOT_NAN', + }, + }; + } + if (this.value === null) { + return { + unaryFilter: { + field: { + fieldPath: this.field.formattedName, + }, + op: this.op === 'EQUAL' ? 'IS_NULL' : 'IS_NOT_NULL', + }, + }; + } + return { + fieldFilter: { + field: { + fieldPath: this.field.formattedName, + }, + op: this.op, + value: this.serializer.encodeValue(this.value), + }, + }; + } + isEqual(other) { + return (other instanceof FieldFilterInternal && + this.field.isEqual(other.field) && + this.op === other.op && + deepEqual(this.value, other.value)); + } +} +exports.FieldFilterInternal = FieldFilterInternal; +//# sourceMappingURL=field-filter-internal.js.map + +/***/ }), + +/***/ 59339: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FieldOrder = void 0; +/** + * A Query order-by field. + * + * @private + * @internal + * @class + */ +class FieldOrder { + /** + * @param field The name of a document field (member) on which to order query + * results. + * @param direction One of 'ASCENDING' (default) or 'DESCENDING' to + * set the ordering direction to ascending or descending, respectively. + */ + constructor(field, direction = 'ASCENDING') { + this.field = field; + this.direction = direction; + } + /** + * Generates the proto representation for this field order. + * @private + * @internal + */ + toProto() { + return { + field: { + fieldPath: this.field.formattedName, + }, + direction: this.direction, + }; + } +} +exports.FieldOrder = FieldOrder; +//# sourceMappingURL=field-order.js.map + +/***/ }), + +/***/ 15702: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FilterInternal = void 0; +class FilterInternal { +} +exports.FilterInternal = FilterInternal; +//# sourceMappingURL=filter-internal.js.map + +/***/ }), + +/***/ 13823: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. 
All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateQueryOrder = validateQueryOrder; +exports.validateQueryOperator = validateQueryOperator; +exports.validateDocumentReference = validateDocumentReference; +exports.validateQueryValue = validateQueryValue; +exports.coalesce = coalesce; +const validate_1 = __nccwpck_require__(33822); +const serializer_1 = __nccwpck_require__(49170); +const document_reference_1 = __nccwpck_require__(502); +const constants_1 = __nccwpck_require__(43176); +/** + * Validates the input string as a field order direction. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param op Order direction to validate. + * @throws when the direction is invalid + * @return a validated input value, which may be different from the provided + * value. + */ +function validateQueryOrder(arg, op) { + // For backwards compatibility, we support both lower and uppercase values. + op = typeof op === 'string' ? op.toLowerCase() : op; + (0, validate_1.validateEnumValue)(arg, op, Object.keys(constants_1.directionOperators), { optional: true }); + return op; +} +/** + * Validates the input string as a field comparison operator. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param op Field comparison operator to validate. + * @param fieldValue Value that is used in the filter. + * @throws when the comparison operation is invalid + * @return a validated input value, which may be different from the provided + * value. + */ +function validateQueryOperator(arg, op, fieldValue) { + // For backwards compatibility, we support both `=` and `==` for "equals". + if (op === '=') { + op = '=='; + } + (0, validate_1.validateEnumValue)(arg, op, Object.keys(constants_1.comparisonOperators)); + if (typeof fieldValue === 'number' && + isNaN(fieldValue) && + op !== '==' && + op !== '!=') { + throw new Error("Invalid query. You can only perform '==' and '!=' comparisons on NaN."); + } + if (fieldValue === null && op !== '==' && op !== '!=') { + throw new Error("Invalid query. You can only perform '==' and '!=' comparisons on Null."); + } + return op; +} +/** + * Validates that 'value' is a DocumentReference. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The argument to validate. + * @return the DocumentReference if valid + */ +function validateDocumentReference(arg, value) { + if (!(value instanceof document_reference_1.DocumentReference)) { + throw new Error((0, validate_1.invalidArgumentMessage)(arg, 'DocumentReference')); + } + return value; +} +/** + * Validates that 'value' can be used as a query value. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The argument to validate. 
+ * @param allowUndefined Whether to allow nested properties that are `undefined`. + */ +function validateQueryValue(arg, value, allowUndefined) { + (0, serializer_1.validateUserInput)(arg, value, 'query constraint', { + allowDeletes: 'none', + allowTransforms: false, + allowUndefined, + }); +} +/** + * Returns the first non-undefined value or `undefined` if no such value exists. + * @private + * @internal + */ +function coalesce(...values) { + return values.find(value => value !== undefined); +} +//# sourceMappingURL=helpers.js.map + +/***/ }), + +/***/ 47188: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.QueryOptions = void 0; +const deepEqual = __nccwpck_require__(28206); +const path_1 = __nccwpck_require__(34908); +const types_1 = __nccwpck_require__(75371); +const helpers_1 = __nccwpck_require__(13823); +/** + * Internal class representing custom Query options. + * + * These options are immutable. Modified options can be created using `with()`. + * @private + * @internal + */ +class QueryOptions { + constructor(parentPath, collectionId, converter, allDescendants, filters, fieldOrders, startAt, endAt, limit, limitType, offset, projection, + // Whether to select all documents under `parentPath`. By default, only + // collections that match `collectionId` are selected. + kindless = false, + // Whether to require consistent documents when restarting the query. By + // default, restarting the query uses the readTime offset of the original + // query to provide consistent results. + requireConsistency = true) { + this.parentPath = parentPath; + this.collectionId = collectionId; + this.converter = converter; + this.allDescendants = allDescendants; + this.filters = filters; + this.fieldOrders = fieldOrders; + this.startAt = startAt; + this.endAt = endAt; + this.limit = limit; + this.limitType = limitType; + this.offset = offset; + this.projection = projection; + this.kindless = kindless; + this.requireConsistency = requireConsistency; + } + /** + * Returns query options for a collection group query. + * @private + * @internal + */ + static forCollectionGroupQuery(collectionId, converter = (0, types_1.defaultConverter)()) { + return new QueryOptions( + /*parentPath=*/ path_1.ResourcePath.EMPTY, collectionId, converter, + /*allDescendants=*/ true, + /*fieldFilters=*/ [], + /*fieldOrders=*/ []); + } + /** + * Returns query options for a single-collection query. + * @private + * @internal + */ + static forCollectionQuery(collectionRef, converter = (0, types_1.defaultConverter)()) { + return new QueryOptions(collectionRef.parent(), collectionRef.id, converter, + /*allDescendants=*/ false, + /*fieldFilters=*/ [], + /*fieldOrders=*/ []); + } + /** + * Returns query options for a query that fetches all descendants under the + * specified reference. 
+ * + * @private + * @internal + */ + static forKindlessAllDescendants(parent, id, requireConsistency = true) { + let options = new QueryOptions(parent, id, (0, types_1.defaultConverter)(), + /*allDescendants=*/ true, + /*fieldFilters=*/ [], + /*fieldOrders=*/ []); + options = options.with({ + kindless: true, + requireConsistency, + }); + return options; + } + /** + * Returns the union of the current and the provided options. + * @private + * @internal + */ + with(settings) { + return new QueryOptions((0, helpers_1.coalesce)(settings.parentPath, this.parentPath), (0, helpers_1.coalesce)(settings.collectionId, this.collectionId), this.converter, (0, helpers_1.coalesce)(settings.allDescendants, this.allDescendants), (0, helpers_1.coalesce)(settings.filters, this.filters), (0, helpers_1.coalesce)(settings.fieldOrders, this.fieldOrders), (0, helpers_1.coalesce)(settings.startAt, this.startAt), (0, helpers_1.coalesce)(settings.endAt, this.endAt), (0, helpers_1.coalesce)(settings.limit, this.limit), (0, helpers_1.coalesce)(settings.limitType, this.limitType), (0, helpers_1.coalesce)(settings.offset, this.offset), (0, helpers_1.coalesce)(settings.projection, this.projection), (0, helpers_1.coalesce)(settings.kindless, this.kindless), (0, helpers_1.coalesce)(settings.requireConsistency, this.requireConsistency)); + } + withConverter(converter) { + return new QueryOptions(this.parentPath, this.collectionId, converter, this.allDescendants, this.filters, this.fieldOrders, this.startAt, this.endAt, this.limit, this.limitType, this.offset, this.projection); + } + hasFieldOrders() { + return this.fieldOrders.length > 0; + } + isEqual(other) { + if (this === other) { + return true; + } + return (other instanceof QueryOptions && + this.parentPath.isEqual(other.parentPath) && + this.filtersEqual(other.filters) && + this.collectionId === other.collectionId && + this.converter === other.converter && + this.allDescendants === other.allDescendants && + this.limit === other.limit && + this.offset === other.offset && + deepEqual(this.fieldOrders, other.fieldOrders) && + deepEqual(this.startAt, other.startAt) && + deepEqual(this.endAt, other.endAt) && + deepEqual(this.projection, other.projection) && + this.kindless === other.kindless && + this.requireConsistency === other.requireConsistency); + } + filtersEqual(other) { + if (this.filters.length !== other.length) { + return false; + } + for (let i = 0; i < other.length; i++) { + if (!this.filters[i].isEqual(other[i])) { + return false; + } + } + return true; + } +} +exports.QueryOptions = QueryOptions; +//# sourceMappingURL=query-options.js.map + +/***/ }), + +/***/ 81796: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.QuerySnapshot = void 0; +const validate_1 = __nccwpck_require__(33822); +const util_1 = __nccwpck_require__(15468); +/** + * A QuerySnapshot contains zero or more + * [QueryDocumentSnapshot]{@link QueryDocumentSnapshot} objects + * representing the results of a query. The documents can be accessed as an + * array via the [documents]{@link QuerySnapshot#documents} property + * or enumerated using the [forEach]{@link QuerySnapshot#forEach} + * method. The number of documents can be determined via the + * [empty]{@link QuerySnapshot#empty} and + * [size]{@link QuerySnapshot#size} properties. + * + * @class QuerySnapshot + */ +class QuerySnapshot { + /** + * @private + * + * @param _query The originating query. + * @param _readTime The time when this query snapshot was obtained. + * @param _size The number of documents in the result set. + * @param docs A callback returning a sorted array of documents matching + * this query + * @param changes A callback returning a sorted array of document change + * events for this snapshot. + */ + constructor(_query, _readTime, _size, docs, changes) { + this._query = _query; + this._readTime = _readTime; + this._size = _size; + this._materializedDocs = null; + this._materializedChanges = null; + this._docs = null; + this._changes = null; + this._docs = docs; + this._changes = changes; + } + /** + * The query on which you called get() or onSnapshot() in order to get this + * QuerySnapshot. + * + * @type {Query} + * @name QuerySnapshot#query + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * query.limit(10).get().then(querySnapshot => { + * console.log(`Returned first batch of results`); + * let query = querySnapshot.query; + * return query.offset(10).get(); + * }).then(() => { + * console.log(`Returned second batch of results`); + * }); + * ``` + */ + get query() { + return this._query; + } + /** + * An array of all the documents in this QuerySnapshot. + * + * @type {Array.} + * @name QuerySnapshot#docs + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * query.get().then(querySnapshot => { + * let docs = querySnapshot.docs; + * for (let doc of docs) { + * console.log(`Document found at path: ${doc.ref.path}`); + * } + * }); + * ``` + */ + get docs() { + if (this._materializedDocs) { + return this._materializedDocs; + } + this._materializedDocs = this._docs(); + this._docs = null; + return this._materializedDocs; + } + /** + * True if there are no documents in the QuerySnapshot. + * + * @type {boolean} + * @name QuerySnapshot#empty + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * query.get().then(querySnapshot => { + * if (querySnapshot.empty) { + * console.log('No documents found.'); + * } + * }); + * ``` + */ + get empty() { + return this._size === 0; + } + /** + * The number of documents in the QuerySnapshot. + * + * @type {number} + * @name QuerySnapshot#size + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * query.get().then(querySnapshot => { + * console.log(`Found ${querySnapshot.size} documents.`); + * }); + * ``` + */ + get size() { + return this._size; + } + /** + * The time this query snapshot was obtained. 
+ * + * @type {Timestamp} + * @name QuerySnapshot#readTime + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * query.get().then((querySnapshot) => { + * let readTime = querySnapshot.readTime; + * console.log(`Query results returned at '${readTime.toDate()}'`); + * }); + * ``` + */ + get readTime() { + return this._readTime; + } + /** + * Returns an array of the documents changes since the last snapshot. If + * this is the first snapshot, all documents will be in the list as added + * changes. + * + * @return {Array.} + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * query.onSnapshot(querySnapshot => { + * let changes = querySnapshot.docChanges(); + * for (let change of changes) { + * console.log(`A document was ${change.type}.`); + * } + * }); + * ``` + */ + docChanges() { + if (this._materializedChanges) { + return this._materializedChanges; + } + this._materializedChanges = this._changes(); + this._changes = null; + return this._materializedChanges; + } + /** + * Enumerates all of the documents in the QuerySnapshot. This is a convenience + * method for running the same callback on each {@link QueryDocumentSnapshot} + * that is returned. + * + * @param {function} callback A callback to be called with a + * [QueryDocumentSnapshot]{@link QueryDocumentSnapshot} for each document in + * the snapshot. + * @param {*=} thisArg The `this` binding for the callback.. + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * query.get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Document found at path: ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + forEach(callback, thisArg) { + (0, validate_1.validateFunction)('callback', callback); + for (const doc of this.docs) { + callback.call(thisArg, doc); + } + } + /** + * Returns true if the document data in this `QuerySnapshot` is equal to the + * provided value. + * + * @param {*} other The value to compare against. + * @return {boolean} true if this `QuerySnapshot` is equal to the provided + * value. + */ + isEqual(other) { + // Since the read time is different on every query read, we explicitly + // ignore all metadata in this comparison. + if (this === other) { + return true; + } + if (!(other instanceof QuerySnapshot)) { + return false; + } + if (this._size !== other._size) { + return false; + } + if (!this._query.isEqual(other._query)) { + return false; + } + if (this._materializedDocs && !this._materializedChanges) { + // If we have only materialized the documents, we compare them first. + return ((0, util_1.isArrayEqual)(this.docs, other.docs) && + (0, util_1.isArrayEqual)(this.docChanges(), other.docChanges())); + } + // Otherwise, we compare the changes first as we expect there to be fewer. + return ((0, util_1.isArrayEqual)(this.docChanges(), other.docChanges()) && + (0, util_1.isArrayEqual)(this.docs, other.docs)); + } +} +exports.QuerySnapshot = QuerySnapshot; +//# sourceMappingURL=query-snapshot.js.map + +/***/ }), + +/***/ 63379: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.QueryUtil = void 0; +const stream_1 = __nccwpck_require__(12781); +const timestamp_1 = __nccwpck_require__(29061); +const document_1 = __nccwpck_require__(98912); +const util_1 = __nccwpck_require__(15468); +const document_change_1 = __nccwpck_require__(62270); +const query_profile_1 = __nccwpck_require__(15453); +const logger_1 = __nccwpck_require__(42718); +const types_1 = __nccwpck_require__(66155); +const constants_1 = __nccwpck_require__(43176); +const trace_util_1 = __nccwpck_require__(2693); +class QueryUtil { + constructor( + /** @private */ + _firestore, + /** @private */ + _queryOptions, + /** @private */ + _serializer) { + this._firestore = _firestore; + this._queryOptions = _queryOptions; + this._serializer = _serializer; + } + _getResponse(query, transactionOrReadTime, retryWithCursor = true, explainOptions) { + // Capture the error stack to preserve stack tracing across async calls. + const stack = Error().stack; + return new Promise((resolve, reject) => { + const docs = []; + const output = {}; + this._stream(query, transactionOrReadTime, retryWithCursor, explainOptions) + .on('error', err => { + reject((0, util_1.wrapError)(err, stack)); + }) + .on('data', (data) => { + if (data.transaction) { + output.transaction = data.transaction; + } + if (data.readTime) { + output.readTime = data.readTime; + } + if (data.explainMetrics) { + output.explainMetrics = data.explainMetrics; + } + if (data.document) { + docs.push(data.document); + } + }) + .on('end', () => { + if (this._queryOptions.limitType === types_1.LimitType.Last) { + // The results for limitToLast queries need to be flipped since + // we reversed the ordering constraints before sending the query + // to the backend. + docs.reverse(); + } + // Only return a snapshot when we have a readTime + // explain queries with analyze !== true will return no documents and no read time + const result = output.readTime + ? query._createSnapshot(output.readTime, docs.length, () => docs, () => { + const changes = []; + for (let i = 0; i < docs.length; ++i) { + changes.push(new document_change_1.DocumentChange('added', docs[i], -1, i)); + } + return changes; + }) + : undefined; + resolve({ + transaction: output.transaction, + explainMetrics: output.explainMetrics, + result, + }); + }); + }); + } + // This method exists solely to enable unit tests to mock it. + _isPermanentRpcError(err, methodName) { + return (0, util_1.isPermanentRpcError)(err, methodName); + } + _hasRetryTimedOut(methodName, startTime) { + const totalTimeout = (0, util_1.getTotalTimeout)(methodName); + if (totalTimeout === 0) { + return false; + } + return Date.now() - startTime >= totalTimeout; + } + stream(query) { + if (this._queryOptions.limitType === types_1.LimitType.Last) { + throw new Error('Query results for queries that include limitToLast() ' + + 'constraints cannot be streamed. 
Use Query.get() instead.'); + } + const responseStream = this._stream(query); + const transform = new stream_1.Transform({ + objectMode: true, + transform(chunk, encoding, callback) { + callback(undefined, chunk.document); + }, + }); + responseStream.pipe(transform); + responseStream.on('error', e => transform.destroy(e)); + return transform; + } + _stream(query, transactionOrReadTime, retryWithCursor = true, explainOptions) { + const tag = (0, util_1.requestTag)(); + const startTime = Date.now(); + const isExplain = explainOptions !== undefined; + let lastReceivedDocument = null; + let backendStream; + const stream = new stream_1.Transform({ + objectMode: true, + transform: (proto, enc, callback) => { + var _a; + if (proto === constants_1.NOOP_MESSAGE) { + callback(undefined); + return; + } + const output = {}; + // Proto comes with zero-length buffer by default + if ((_a = proto.transaction) === null || _a === void 0 ? void 0 : _a.length) { + output.transaction = proto.transaction; + } + if (proto.readTime) { + output.readTime = timestamp_1.Timestamp.fromProto(proto.readTime); + } + if (proto.document) { + const document = this._firestore.snapshot_(proto.document, proto.readTime); + const finalDoc = new document_1.DocumentSnapshotBuilder(document.ref.withConverter(this._queryOptions.converter)); + // Recreate the QueryDocumentSnapshot with the DocumentReference + // containing the original converter. + finalDoc.fieldsProto = document._fieldsProto; + finalDoc.readTime = document.readTime; + finalDoc.createTime = document.createTime; + finalDoc.updateTime = document.updateTime; + lastReceivedDocument = finalDoc.build(); + output.document = lastReceivedDocument; + } + if (proto.explainMetrics) { + output.explainMetrics = query_profile_1.ExplainMetrics._fromProto(proto.explainMetrics, this._serializer); + } + callback(undefined, output); + if (proto.done) { + (0, logger_1.logger)('QueryUtil._stream', tag, 'Trigger Logical Termination.'); + backendStream.unpipe(stream); + backendStream.resume(); + backendStream.end(); + stream.end(); + } + }, + }); + this._firestore + .initializeIfNeeded(tag) + .then(async () => { + // `toProto()` might throw an exception. We rely on the behavior of an + // async function to convert this exception into the rejected Promise we + // catch below. + let request = query.toProto(transactionOrReadTime, explainOptions); + let isRetryRequestWithCursor = false; + let streamActive; + do { + streamActive = new util_1.Deferred(); + const methodName = 'runQuery'; + this._firestore._traceUtil + .currentSpan() + .addEvent(trace_util_1.SPAN_NAME_RUN_QUERY, { + [trace_util_1.ATTRIBUTE_KEY_IS_TRANSACTIONAL]: !!request.transaction, + [trace_util_1.ATTRIBUTE_KEY_IS_RETRY_WITH_CURSOR]: isRetryRequestWithCursor, + }); + backendStream = await this._firestore.requestStream(methodName, + /* bidirectional= */ false, request, tag); + backendStream.on('error', err => { + backendStream.unpipe(stream); + // If a non-transactional query failed, attempt to restart. + // Transactional queries are retried via the transaction runner. + // Explain queries are not retried with a cursor. That would produce + // incorrect/partial profiling results. 
+ if (!isExplain && + !transactionOrReadTime && + !this._isPermanentRpcError(err, methodName)) { + (0, logger_1.logger)('QueryUtil._stream', tag, 'Query failed with retryable stream error:', err); + this._firestore._traceUtil + .currentSpan() + .addEvent(`${trace_util_1.SPAN_NAME_RUN_QUERY}: Retryable Error.`, { + 'error.message': err.message, + }); + // Enqueue a "no-op" write into the stream and wait for it to be + // read by the downstream consumer. This ensures that all enqueued + // results in the stream are consumed, which will give us an accurate + // value for `lastReceivedDocument`. + stream.write(constants_1.NOOP_MESSAGE, () => { + if (this._hasRetryTimedOut(methodName, startTime)) { + (0, logger_1.logger)('QueryUtil._stream', tag, 'Query failed with retryable stream error but the total retry timeout has exceeded.'); + stream.destroy(err); + streamActive.resolve(/* active= */ false); + } + else if (lastReceivedDocument && retryWithCursor) { + (0, logger_1.logger)('Query._stream', tag, 'Query failed with retryable stream error and progress was made receiving ' + + 'documents, so the stream is being retried.'); + isRetryRequestWithCursor = true; + // Restart the query but use the last document we received as + // the query cursor. Note that we do not use backoff here. The + // call to `requestStream()` will backoff should the restart + // fail before delivering any results. + if (this._queryOptions.requireConsistency) { + request = query + .startAfter(lastReceivedDocument) + .toProto(lastReceivedDocument.readTime); + } + else { + request = query.startAfter(lastReceivedDocument).toProto(); + } + // Set lastReceivedDocument to null before each retry attempt to ensure the retry makes progress + lastReceivedDocument = null; + streamActive.resolve(/* active= */ true); + } + else { + (0, logger_1.logger)('QueryUtil._stream', tag, `Query failed with retryable stream error however either retryWithCursor="${retryWithCursor}", or ` + + 'no progress was made receiving documents, so the stream is being closed.'); + stream.destroy(err); + streamActive.resolve(/* active= */ false); + } + }); + } + else { + (0, logger_1.logger)('QueryUtil._stream', tag, 'Query failed with stream error:', err); + this._firestore._traceUtil + .currentSpan() + .addEvent(`${trace_util_1.SPAN_NAME_RUN_QUERY}: Error.`, { + 'error.message': err.message, + }); + stream.destroy(err); + streamActive.resolve(/* active= */ false); + } + }); + backendStream.on('end', () => { + streamActive.resolve(/* active= */ false); + }); + backendStream.resume(); + backendStream.pipe(stream); + } while (await streamActive.promise); + }) + .catch(e => stream.destroy(e)); + return stream; + } +} +exports.QueryUtil = QueryUtil; +//# sourceMappingURL=query-util.js.map + +/***/ }), + +/***/ 38621: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Query = void 0; +const stream_1 = __nccwpck_require__(12781); +const query_util_1 = __nccwpck_require__(63379); +const index_1 = __nccwpck_require__(32210); +const field_order_1 = __nccwpck_require__(59339); +const field_filter_internal_1 = __nccwpck_require__(97475); +const composite_filter_internal_1 = __nccwpck_require__(98009); +const constants_1 = __nccwpck_require__(43176); +const document_reference_1 = __nccwpck_require__(502); +const query_snapshot_1 = __nccwpck_require__(81796); +const serializer_1 = __nccwpck_require__(49170); +const query_profile_1 = __nccwpck_require__(15453); +const filter_1 = __nccwpck_require__(47864); +const path_1 = __nccwpck_require__(34908); +const helpers_1 = __nccwpck_require__(13823); +const validate_1 = __nccwpck_require__(33822); +const types_1 = __nccwpck_require__(66155); +const aggregate_query_1 = __nccwpck_require__(8763); +const vector_query_1 = __nccwpck_require__(71943); +const order_1 = __nccwpck_require__(66849); +const types_2 = __nccwpck_require__(75371); +const trace_util_1 = __nccwpck_require__(2693); +/** + * A Query refers to a query which you can read or stream from. You can also + * construct refined Query objects by adding filters and ordering. + * + * @class Query + */ +class Query { + /** + * @internal + * @private + * + * @param _firestore The Firestore Database client. + * @param _queryOptions Options that define the query. + */ + constructor( + /** + * @internal + * @private + **/ + _firestore, + /** + * @internal + * @private + **/ + _queryOptions) { + this._firestore = _firestore; + this._queryOptions = _queryOptions; + this._serializer = new serializer_1.Serializer(_firestore); + this._allowUndefined = + !!this._firestore._settings.ignoreUndefinedProperties; + this._queryUtil = new query_util_1.QueryUtil(_firestore, _queryOptions, this._serializer); + } + /** + * Extracts field values from the DocumentSnapshot based on the provided + * field order. + * + * @private + * @internal + * @param documentSnapshot The document to extract the fields from. + * @param fieldOrders The field order that defines what fields we should + * extract. + * @return {Array.<*>} The field values to use. + */ + static _extractFieldValues(documentSnapshot, fieldOrders) { + const fieldValues = []; + for (const fieldOrder of fieldOrders) { + if (index_1.FieldPath.documentId().isEqual(fieldOrder.field)) { + fieldValues.push(documentSnapshot.ref); + } + else { + const fieldValue = documentSnapshot.get(fieldOrder.field); + if (fieldValue === undefined) { + throw new Error(`Field "${fieldOrder.field}" is missing in the provided DocumentSnapshot. ` + + 'Please provide a document that contains values for all specified ' + + 'orderBy() and where() constraints.'); + } + else { + fieldValues.push(fieldValue); + } + } + } + return fieldValues; + } + /** + * The [Firestore]{@link Firestore} instance for the Firestore + * database (useful for performing transactions, etc.). 
+ * + * @type {Firestore} + * @name Query#firestore + * @readonly + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * + * collectionRef.add({foo: 'bar'}).then(documentReference => { + * let firestore = documentReference.firestore; + * console.log(`Root location for document is ${firestore.formattedName}`); + * }); + * ``` + */ + get firestore() { + return this._firestore; + } + where(fieldPathOrFilter, opStr, value) { + let filter; + if (fieldPathOrFilter instanceof index_1.Filter) { + filter = fieldPathOrFilter; + } + else { + filter = index_1.Filter.where(fieldPathOrFilter, opStr, value); + } + if (this._queryOptions.startAt || this._queryOptions.endAt) { + throw new Error('Cannot specify a where() filter after calling startAt(), ' + + 'startAfter(), endBefore() or endAt().'); + } + const parsedFilter = this._parseFilter(filter); + if (parsedFilter.getFilters().length === 0) { + // Return the existing query if not adding any more filters (e.g. an empty composite filter). + return this; + } + const options = this._queryOptions.with({ + filters: this._queryOptions.filters.concat(parsedFilter), + }); + return new Query(this._firestore, options); + } + /** + * @internal + * @private + */ + _parseFilter(filter) { + if (filter instanceof filter_1.UnaryFilter) { + return this._parseFieldFilter(filter); + } + return this._parseCompositeFilter(filter); + } + /** + * @internal + * @private + */ + _parseFieldFilter(fieldFilterData) { + let value = fieldFilterData._getValue(); + let operator = fieldFilterData._getOperator(); + const fieldPath = fieldFilterData._getField(); + (0, path_1.validateFieldPath)('fieldPath', fieldPath); + operator = (0, helpers_1.validateQueryOperator)('opStr', operator, value); + (0, helpers_1.validateQueryValue)('value', value, this._allowUndefined); + const path = index_1.FieldPath.fromArgument(fieldPath); + if (index_1.FieldPath.documentId().isEqual(path)) { + if (operator === 'array-contains' || operator === 'array-contains-any') { + throw new Error(`Invalid Query. You can't perform '${operator}' ` + + 'queries on FieldPath.documentId().'); + } + else if (operator === 'in' || operator === 'not-in') { + if (!Array.isArray(value) || value.length === 0) { + throw new Error(`Invalid Query. A non-empty array is required for '${operator}' filters.`); + } + value = value.map(el => this.validateReference(el)); + } + else { + value = this.validateReference(value); + } + } + return new field_filter_internal_1.FieldFilterInternal(this._serializer, path, constants_1.comparisonOperators[operator], value); + } + /** + * @internal + * @private + */ + _parseCompositeFilter(compositeFilterData) { + const parsedFilters = compositeFilterData + ._getFilters() + .map(filter => this._parseFilter(filter)) + .filter(parsedFilter => parsedFilter.getFilters().length > 0); + // For composite filters containing 1 filter, return the only filter. + // For example: AND(FieldFilter1) == FieldFilter1 + if (parsedFilters.length === 1) { + return parsedFilters[0]; + } + return new composite_filter_internal_1.CompositeFilterInternal(parsedFilters, compositeFilterData._getOperator() === 'AND' ? 'AND' : 'OR'); + } + /** + * Creates and returns a new [Query]{@link Query} instance that applies a + * field mask to the result and returns only the specified subset of fields. + * You can specify a list of field paths to return, or use an empty list to + * only return the references of matching documents. 
+ * + * Queries that contain field masks cannot be listened to via `onSnapshot()` + * listeners. + * + * This function returns a new (immutable) instance of the Query (rather than + * modify the existing instance) to impose the field mask. + * + * @param {...(string|FieldPath)} fieldPaths The field paths to return. + * @returns {Query} The created Query. + * + * @example + * ``` + * let collectionRef = firestore.collection('col'); + * let documentRef = collectionRef.doc('doc'); + * + * return documentRef.set({x:10, y:5}).then(() => { + * return collectionRef.where('x', '>', 5).select('y').get(); + * }).then((res) => { + * console.log(`y is ${res.docs[0].get('y')}.`); + * }); + * ``` + */ + select(...fieldPaths) { + const fields = []; + if (fieldPaths.length === 0) { + fields.push({ fieldPath: index_1.FieldPath.documentId().formattedName }); + } + else { + for (let i = 0; i < fieldPaths.length; ++i) { + (0, path_1.validateFieldPath)(i, fieldPaths[i]); + fields.push({ + fieldPath: index_1.FieldPath.fromArgument(fieldPaths[i]).formattedName, + }); + } + } + // By specifying a field mask, the query result no longer conforms to type + // `T`. We there return `Query`; + const options = this._queryOptions.with({ + projection: { fields }, + }); + return new Query(this._firestore, options); + } + /** + * Creates and returns a new [Query]{@link Query} that's additionally sorted + * by the specified field, optionally in descending order instead of + * ascending. + * + * This function returns a new (immutable) instance of the Query (rather than + * modify the existing instance) to impose the field mask. + * + * @param {string|FieldPath} fieldPath The field to sort by. + * @param {string=} directionStr Optional direction to sort by ('asc' or + * 'desc'). If not specified, order will be ascending. + * @returns {Query} The created Query. + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '>', 42); + * + * query.orderBy('foo', 'desc').get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + orderBy(fieldPath, directionStr) { + (0, path_1.validateFieldPath)('fieldPath', fieldPath); + directionStr = (0, helpers_1.validateQueryOrder)('directionStr', directionStr); + if (this._queryOptions.startAt || this._queryOptions.endAt) { + throw new Error('Cannot specify an orderBy() constraint after calling ' + + 'startAt(), startAfter(), endBefore() or endAt().'); + } + const newOrder = new field_order_1.FieldOrder(index_1.FieldPath.fromArgument(fieldPath), constants_1.directionOperators[directionStr || 'asc']); + const options = this._queryOptions.with({ + fieldOrders: this._queryOptions.fieldOrders.concat(newOrder), + }); + return new Query(this._firestore, options); + } + /** + * Creates and returns a new [Query]{@link Query} that only returns the + * first matching documents. + * + * This function returns a new (immutable) instance of the Query (rather than + * modify the existing instance) to impose the limit. + * + * @param {number} limit The maximum number of items to return. + * @returns {Query} The created Query. 
+ * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '>', 42); + * + * query.limit(1).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + limit(limit) { + (0, validate_1.validateInteger)('limit', limit); + const options = this._queryOptions.with({ + limit, + limitType: types_1.LimitType.First, + }); + return new Query(this._firestore, options); + } + /** + * Creates and returns a new [Query]{@link Query} that only returns the + * last matching documents. + * + * You must specify at least one orderBy clause for limitToLast queries, + * otherwise an exception will be thrown during execution. + * + * Results for limitToLast queries cannot be streamed via the `stream()` API. + * + * @param limit The maximum number of items to return. + * @return The created Query. + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '>', 42); + * + * query.limitToLast(1).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Last matching document is ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + limitToLast(limit) { + (0, validate_1.validateInteger)('limitToLast', limit); + const options = this._queryOptions.with({ limit, limitType: types_1.LimitType.Last }); + return new Query(this._firestore, options); + } + /** + * Specifies the offset of the returned results. + * + * This function returns a new (immutable) instance of the + * [Query]{@link Query} (rather than modify the existing instance) + * to impose the offset. + * + * @param {number} offset The offset to apply to the Query results + * @returns {Query} The created Query. + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '>', 42); + * + * query.limit(10).offset(20).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + offset(offset) { + (0, validate_1.validateInteger)('offset', offset); + const options = this._queryOptions.with({ offset }); + return new Query(this._firestore, options); + } + /** + * Returns a query that counts the documents in the result set of this + * query. + * + * The returned query, when executed, counts the documents in the result set + * of this query without actually downloading the documents. + * + * Using the returned query to count the documents is efficient because only + * the final count, not the documents' data, is downloaded. The returned + * query can count the documents in cases where the result set is + * prohibitively large to download entirely (thousands of documents). + * + * @return a query that counts the documents in the result set of this + * query. The count can be retrieved from `snapshot.data().count`, where + * `snapshot` is the `AggregateQuerySnapshot` resulting from running the + * returned query. + */ + count() { + return this.aggregate({ + count: index_1.AggregateField.count(), + }); + } + /** + * Returns a query that can perform the given aggregations. + * + * The returned query, when executed, calculates the specified aggregations + * over the documents in the result set of this query without actually + * downloading the documents. + * + * Using the returned query to perform aggregations is efficient because only + * the final aggregation values, not the documents' data, is downloaded. 
The + * returned query can perform aggregations of the documents count the + * documents in cases where the result set is prohibitively large to download + * entirely (thousands of documents). + * + * @param aggregateSpec An `AggregateSpec` object that specifies the aggregates + * to perform over the result set. The AggregateSpec specifies aliases for each + * aggregate, which can be used to retrieve the aggregate result. + * @example + * ```typescript + * const aggregateQuery = col.aggregate(query, { + * countOfDocs: count(), + * totalHours: sum('hours'), + * averageScore: average('score') + * }); + * + * const aggregateSnapshot = await aggregateQuery.get(); + * const countOfDocs: number = aggregateSnapshot.data().countOfDocs; + * const totalHours: number = aggregateSnapshot.data().totalHours; + * const averageScore: number | null = aggregateSnapshot.data().averageScore; + * ``` + */ + aggregate(aggregateSpec) { + return new aggregate_query_1.AggregateQuery(this, aggregateSpec); + } + findNearest(vectorFieldOrOptions, queryVector, options) { + if (typeof vectorFieldOrOptions === 'string' || + vectorFieldOrOptions instanceof index_1.FieldPath) { + const vqOptions = { + distanceMeasure: options.distanceMeasure, + limit: options.limit, + queryVector: queryVector, + vectorField: vectorFieldOrOptions, + }; + return this._findNearest(vqOptions); + } + else { + return this._findNearest(vectorFieldOrOptions); + } + } + _findNearest(options) { + (0, path_1.validateFieldPath)('vectorField', options.vectorField); + if (options.limit <= 0) { + throw (0, validate_1.invalidArgumentMessage)('limit', 'positive limit number'); + } + if ((Array.isArray(options.queryVector) + ? options.queryVector.length + : options.queryVector.toArray().length) === 0) { + throw (0, validate_1.invalidArgumentMessage)('queryVector', 'vector size must be larger than 0'); + } + return new vector_query_1.VectorQuery(this, options); + } + /** + * Returns true if this `Query` is equal to the provided value. + * + * @param {*} other The value to compare against. + * @return {boolean} true if this `Query` is equal to the provided value. + */ + isEqual(other) { + if (this === other) { + return true; + } + return (other instanceof Query && this._queryOptions.isEqual(other._queryOptions)); + } + /** + * Returns the sorted array of inequality filter fields used in this query. + * + * @return An array of inequality filter fields sorted lexicographically by FieldPath. + */ + getInequalityFilterFields() { + const inequalityFields = []; + for (const filter of this._queryOptions.filters) { + for (const subFilter of filter.getFlattenedFilters()) { + if (subFilter.isInequalityFilter()) { + inequalityFields.push(subFilter.field); + } + } + } + return inequalityFields.sort((a, b) => a.compareTo(b)); + } + /** + * Computes the backend ordering semantics for DocumentSnapshot cursors. + * + * @private + * @internal + * @param cursorValuesOrDocumentSnapshot The snapshot of the document or the + * set of field values to use as the boundary. + * @returns The implicit ordering semantics. + */ + createImplicitOrderBy(cursorValuesOrDocumentSnapshot) { + // Add an implicit orderBy if the only cursor value is a DocumentSnapshot. 
+ if (cursorValuesOrDocumentSnapshot.length !== 1 || + !(cursorValuesOrDocumentSnapshot[0] instanceof index_1.DocumentSnapshot)) { + return this._queryOptions.fieldOrders; + } + const fieldOrders = this._queryOptions.fieldOrders.slice(); + const fieldsNormalized = new Set([ + ...fieldOrders.map(item => item.field.toString()), + ]); + /** The order of the implicit ordering always matches the last explicit order by. */ + const lastDirection = fieldOrders.length === 0 + ? constants_1.directionOperators.ASC + : fieldOrders[fieldOrders.length - 1].direction; + /** + * Any inequality fields not explicitly ordered should be implicitly ordered in a + * lexicographical order. When there are multiple inequality filters on the same field, the + * field should be added only once. + * Note: getInequalityFilterFields function sorts the key field before + * other fields. However, we want the key field to be sorted last. + */ + const inequalityFields = this.getInequalityFilterFields(); + for (const field of inequalityFields) { + if (!fieldsNormalized.has(field.toString()) && + !field.isEqual(index_1.FieldPath.documentId())) { + fieldOrders.push(new field_order_1.FieldOrder(field, lastDirection)); + fieldsNormalized.add(field.toString()); + } + } + // Add the document key field to the last if it is not explicitly ordered. + if (!fieldsNormalized.has(index_1.FieldPath.documentId().toString())) { + fieldOrders.push(new field_order_1.FieldOrder(index_1.FieldPath.documentId(), lastDirection)); + } + return fieldOrders; + } + /** + * Builds a Firestore 'Position' proto message. + * + * @private + * @internal + * @param {Array.} fieldOrders The field orders to use for this + * cursor. + * @param {Array.} cursorValuesOrDocumentSnapshot The + * snapshot of the document or the set of field values to use as the boundary. + * @param before Whether the query boundary lies just before or after the + * provided data. + * @returns {Object} The proto message. + */ + createCursor(fieldOrders, cursorValuesOrDocumentSnapshot, before) { + let fieldValues; + if (cursorValuesOrDocumentSnapshot.length === 1 && + cursorValuesOrDocumentSnapshot[0] instanceof index_1.DocumentSnapshot) { + fieldValues = Query._extractFieldValues(cursorValuesOrDocumentSnapshot[0], fieldOrders); + } + else { + fieldValues = cursorValuesOrDocumentSnapshot; + } + if (fieldValues.length > fieldOrders.length) { + throw new Error('Too many cursor values specified. The specified ' + + 'values must match the orderBy() constraints of the query.'); + } + const options = { values: [], before }; + for (let i = 0; i < fieldValues.length; ++i) { + let fieldValue = fieldValues[i]; + if (index_1.FieldPath.documentId().isEqual(fieldOrders[i].field)) { + fieldValue = this.validateReference(fieldValue); + } + (0, helpers_1.validateQueryValue)(i, fieldValue, this._allowUndefined); + options.values.push(this._serializer.encodeValue(fieldValue)); + } + return options; + } + /** + * Validates that a value used with FieldValue.documentId() is either a + * string or a DocumentReference that is part of the query`s result set. + * Throws a validation error or returns a DocumentReference that can + * directly be used in the Query. + * + * @param val The value to validate. + * @throws If the value cannot be used for this query. + * @return If valid, returns a DocumentReference that can be used with the + * query. + * @private + * @internal + */ + validateReference(val) { + const basePath = this._queryOptions.allDescendants + ? 
this._queryOptions.parentPath + : this._queryOptions.parentPath.append(this._queryOptions.collectionId); + let reference; + if (typeof val === 'string') { + const path = basePath.append(val); + if (this._queryOptions.allDescendants) { + if (!path.isDocument) { + throw new Error('When querying a collection group and ordering by ' + + 'FieldPath.documentId(), the corresponding value must result in ' + + `a valid document path, but '${val}' is not because it ` + + 'contains an odd number of segments.'); + } + } + else if (val.indexOf('/') !== -1) { + throw new Error('When querying a collection and ordering by FieldPath.documentId(), ' + + `the corresponding value must be a plain document ID, but '${val}' ` + + 'contains a slash.'); + } + reference = new document_reference_1.DocumentReference(this._firestore, basePath.append(val), this._queryOptions.converter); + } + else if (val instanceof document_reference_1.DocumentReference) { + reference = val; + if (!basePath.isPrefixOf(reference._path)) { + throw new Error(`"${reference.path}" is not part of the query result set and ` + + 'cannot be used as a query boundary.'); + } + } + else { + throw new Error('The corresponding value for FieldPath.documentId() must be a ' + + `string or a DocumentReference, but was "${val}".`); + } + if (!this._queryOptions.allDescendants && + reference._path.parent().compareTo(basePath) !== 0) { + throw new Error('Only a direct child can be used as a query boundary. ' + + `Found: "${reference.path}".`); + } + return reference; + } + /** + * Creates and returns a new [Query]{@link Query} that starts at the provided + * set of field values relative to the order of the query. The order of the + * provided values must match the order of the order by clauses of the query. + * + * @param {...*|DocumentSnapshot} fieldValuesOrDocumentSnapshot The snapshot + * of the document the query results should start at or the field values to + * start this query at, in order of the query's order by. + * @returns {Query} A query with the new starting point. + * + * @example + * ``` + * let query = firestore.collection('col'); + * + * query.orderBy('foo').startAt(42).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + startAt(...fieldValuesOrDocumentSnapshot) { + (0, validate_1.validateMinNumberOfArguments)('Query.startAt', fieldValuesOrDocumentSnapshot, 1); + const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); + const startAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, true); + const options = this._queryOptions.with({ fieldOrders, startAt }); + return new Query(this._firestore, options); + } + /** + * Creates and returns a new [Query]{@link Query} that starts after the + * provided set of field values relative to the order of the query. The order + * of the provided values must match the order of the order by clauses of the + * query. + * + * @param {...*|DocumentSnapshot} fieldValuesOrDocumentSnapshot The snapshot + * of the document the query results should start after or the field values to + * start this query after, in order of the query's order by. + * @returns {Query} A query with the new starting point. 
+ * + * @example + * ``` + * let query = firestore.collection('col'); + * + * query.orderBy('foo').startAfter(42).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + startAfter(...fieldValuesOrDocumentSnapshot) { + (0, validate_1.validateMinNumberOfArguments)('Query.startAfter', fieldValuesOrDocumentSnapshot, 1); + const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); + const startAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, false); + const options = this._queryOptions.with({ fieldOrders, startAt }); + return new Query(this._firestore, options); + } + /** + * Creates and returns a new [Query]{@link Query} that ends before the set of + * field values relative to the order of the query. The order of the provided + * values must match the order of the order by clauses of the query. + * + * @param {...*|DocumentSnapshot} fieldValuesOrDocumentSnapshot The snapshot + * of the document the query results should end before or the field values to + * end this query before, in order of the query's order by. + * @returns {Query} A query with the new ending point. + * + * @example + * ``` + * let query = firestore.collection('col'); + * + * query.orderBy('foo').endBefore(42).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + endBefore(...fieldValuesOrDocumentSnapshot) { + (0, validate_1.validateMinNumberOfArguments)('Query.endBefore', fieldValuesOrDocumentSnapshot, 1); + const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); + const endAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, true); + const options = this._queryOptions.with({ fieldOrders, endAt }); + return new Query(this._firestore, options); + } + /** + * Creates and returns a new [Query]{@link Query} that ends at the provided + * set of field values relative to the order of the query. The order of the + * provided values must match the order of the order by clauses of the query. + * + * @param {...*|DocumentSnapshot} fieldValuesOrDocumentSnapshot The snapshot + * of the document the query results should end at or the field values to end + * this query at, in order of the query's order by. + * @returns {Query} A query with the new ending point. + * + * @example + * ``` + * let query = firestore.collection('col'); + * + * query.orderBy('foo').endAt(42).get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + endAt(...fieldValuesOrDocumentSnapshot) { + (0, validate_1.validateMinNumberOfArguments)('Query.endAt', fieldValuesOrDocumentSnapshot, 1); + const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); + const endAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, false); + const options = this._queryOptions.with({ fieldOrders, endAt }); + return new Query(this._firestore, options); + } + /** + * Executes the query and returns the results as a + * [QuerySnapshot]{@link QuerySnapshot}. + * + * @returns {Promise.} A Promise that resolves with the results + * of the Query. 
+ * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * query.get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Found document at ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + async get() { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_QUERY_GET, async () => { + const { result } = await this._get(); + return result; + }); + } + /** + * Plans and optionally executes this query. Returns a Promise that will be + * resolved with the planner information, statistics from the query execution (if any), + * and the query results (if any). + * + * @return A Promise that will be resolved with the planner information, statistics + * from the query execution (if any), and the query results (if any). + */ + async explain(options) { + if (options === undefined) { + options = {}; + } + const { result, explainMetrics } = await this._getResponse(undefined, options); + if (!explainMetrics) { + throw new Error('No explain results'); + } + return new query_profile_1.ExplainResults(explainMetrics, result || null); + } + /** + * Internal get() method that accepts an optional transaction options, and + * returns a query snapshot with transaction and explain metadata. + * + * @private + * @internal + * @param transactionOrReadTime A transaction ID, options to start a new + * transaction, or timestamp to use as read time. + */ + async _get(transactionOrReadTime) { + const result = await this._getResponse(transactionOrReadTime); + if (!result.result) { + throw new Error('No QuerySnapshot result'); + } + return result; + } + _getResponse(transactionOrReadTime, explainOptions) { + return this._queryUtil._getResponse(this, transactionOrReadTime, true, explainOptions); + } + /** + * Executes the query and streams the results as + * [QueryDocumentSnapshots]{@link QueryDocumentSnapshot}. + * + * @returns {Stream.} A stream of + * QueryDocumentSnapshots. + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * let count = 0; + * + * query.stream().on('data', (documentSnapshot) => { + * console.log(`Found document with name '${documentSnapshot.id}'`); + * ++count; + * }).on('end', () => { + * console.log(`Total count is ${count}`); + * }); + * ``` + */ + stream() { + return this._queryUtil.stream(this); + } + /** + * Executes the query and streams the results as the following object: + * {document?: DocumentSnapshot, metrics?: ExplainMetrics} + * + * The stream surfaces documents one at a time as they are received from the + * server, and at the end, it will surface the metrics associated with + * executing the query. + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * let count = 0; + * + * query.explainStream({analyze: true}).on('data', (data) => { + * if (data.document) { + * // Use data.document which is a DocumentSnapshot instance. + * console.log(`Found document with name '${data.document.id}'`); + * ++count; + * } + * if (data.metrics) { + * // Use data.metrics which is an ExplainMetrics instance. + * } + * }).on('end', () => { + * console.log(`Received ${count} documents.`); + * }); + * ``` + */ + explainStream(explainOptions) { + if (explainOptions === undefined) { + explainOptions = {}; + } + if (this._queryOptions.limitType === types_1.LimitType.Last) { + throw new Error('Query results for queries that include limitToLast() ' + + 'constraints cannot be streamed. 
Use Query.explain() instead.'); + } + const responseStream = this._stream(undefined, explainOptions); + const transform = new stream_1.Transform({ + objectMode: true, + transform(chunk, encoding, callback) { + if (chunk.document || chunk.explainMetrics) { + callback(undefined, { + document: chunk.document, + metrics: chunk.explainMetrics, + }); + } + }, + }); + responseStream.pipe(transform); + responseStream.on('error', e => transform.destroy(e)); + return transform; + } + /** + * Converts a QueryCursor to its proto representation. + * + * @param cursor The original cursor value + * @private + * @internal + */ + toCursor(cursor) { + if (cursor) { + return cursor.before + ? { before: true, values: cursor.values } + : { values: cursor.values }; + } + return undefined; + } + /** + * Internal method for serializing a query to its RunQuery proto + * representation with an optional transaction id or read time. + * + * @param transactionOrReadTime A transaction ID, options to start a new + * transaction, or timestamp to use as read time. + * @param explainOptions Options to use for explaining the query (if any). + * @private + * @internal + * @returns Serialized JSON for the query. + */ + toProto(transactionOrReadTime, explainOptions) { + const projectId = this.firestore.projectId; + const databaseId = this.firestore.databaseId; + const parentPath = this._queryOptions.parentPath.toQualifiedResourcePath(projectId, databaseId); + const structuredQuery = this.toStructuredQuery(); + // For limitToLast queries, the structured query has to be translated to a version with + // reversed ordered, and flipped startAt/endAt to work properly. + if (this._queryOptions.limitType === types_1.LimitType.Last) { + if (!this._queryOptions.hasFieldOrders()) { + throw new Error('limitToLast() queries require specifying at least one orderBy() clause.'); + } + structuredQuery.orderBy = this._queryOptions.fieldOrders.map(order => { + // Flip the orderBy directions since we want the last results + const dir = order.direction === 'DESCENDING' ? 'ASCENDING' : 'DESCENDING'; + return new field_order_1.FieldOrder(order.field, dir).toProto(); + }); + // Swap the cursors to match the now-flipped query ordering. + structuredQuery.startAt = this._queryOptions.endAt + ? this.toCursor({ + values: this._queryOptions.endAt.values, + before: !this._queryOptions.endAt.before, + }) + : undefined; + structuredQuery.endAt = this._queryOptions.startAt + ? this.toCursor({ + values: this._queryOptions.startAt.values, + before: !this._queryOptions.startAt.before, + }) + : undefined; + } + const runQueryRequest = { + parent: parentPath.formattedName, + structuredQuery, + }; + if (transactionOrReadTime instanceof Uint8Array) { + runQueryRequest.transaction = transactionOrReadTime; + } + else if (transactionOrReadTime instanceof index_1.Timestamp) { + runQueryRequest.readTime = transactionOrReadTime.toProto().timestampValue; + } + else if (transactionOrReadTime) { + runQueryRequest.newTransaction = transactionOrReadTime; + } + if (explainOptions) { + runQueryRequest.explainOptions = explainOptions; + } + return runQueryRequest; + } + /** + * Converts current Query to an IBundledQuery. 
+ * + * @private + * @internal + */ + _toBundledQuery() { + const projectId = this.firestore.projectId; + const databaseId = this.firestore.databaseId; + const parentPath = this._queryOptions.parentPath.toQualifiedResourcePath(projectId, databaseId); + const structuredQuery = this.toStructuredQuery(); + const bundledQuery = { + parent: parentPath.formattedName, + structuredQuery, + }; + if (this._queryOptions.limitType === types_1.LimitType.First) { + bundledQuery.limitType = 'FIRST'; + } + else if (this._queryOptions.limitType === types_1.LimitType.Last) { + bundledQuery.limitType = 'LAST'; + } + return bundledQuery; + } + toStructuredQuery() { + const structuredQuery = { + from: [{}], + }; + if (this._queryOptions.allDescendants) { + structuredQuery.from[0].allDescendants = true; + } + // Kindless queries select all descendant documents, so we remove the + // collectionId field. + if (!this._queryOptions.kindless) { + structuredQuery.from[0].collectionId = this._queryOptions.collectionId; + } + if (this._queryOptions.filters.length >= 1) { + structuredQuery.where = new composite_filter_internal_1.CompositeFilterInternal(this._queryOptions.filters, 'AND').toProto(); + } + if (this._queryOptions.hasFieldOrders()) { + structuredQuery.orderBy = this._queryOptions.fieldOrders.map(o => o.toProto()); + } + structuredQuery.startAt = this.toCursor(this._queryOptions.startAt); + structuredQuery.endAt = this.toCursor(this._queryOptions.endAt); + if (this._queryOptions.limit) { + structuredQuery.limit = { value: this._queryOptions.limit }; + } + structuredQuery.offset = this._queryOptions.offset; + structuredQuery.select = this._queryOptions.projection; + return structuredQuery; + } + /** + * @internal + * @private + * This method exists solely to maintain backward compatability. + */ + _isPermanentRpcError(err, methodName) { + return this._queryUtil._isPermanentRpcError(err, methodName); + } + /** + * @internal + * @private + * This method exists solely to maintain backward compatability. + */ + _hasRetryTimedOut(methodName, startTime) { + return this._queryUtil._hasRetryTimedOut(methodName, startTime); + } + /** + * Internal streaming method that accepts an optional transaction ID. + * + * BEWARE: If `transactionOrReadTime` is `ITransactionOptions`, then the first + * response in the stream will be a transaction response. + * + * @param transactionOrReadTime A transaction ID, options to start a new + * transaction, or timestamp to use as read time. + * @param explainOptions Options to use for explaining the query (if any). + * @private + * @internal + * @returns A stream of document results, optionally preceded by a transaction response. + */ + _stream(transactionOrReadTime, explainOptions) { + return this._queryUtil._stream(this, transactionOrReadTime, true, explainOptions); + } + /** + * Attaches a listener for QuerySnapshot events. + * + * @param {querySnapshotCallback} onNext A callback to be called every time + * a new [QuerySnapshot]{@link QuerySnapshot} is available. + * @param {errorCallback=} onError A callback to be called if the listen + * fails or is cancelled. No further callbacks will occur. + * + * @returns {function()} An unsubscribe function that can be called to cancel + * the snapshot listener. 
+ * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * let unsubscribe = query.onSnapshot(querySnapshot => { + * console.log(`Received query snapshot of size ${querySnapshot.size}`); + * }, err => { + * console.log(`Encountered error: ${err}`); + * }); + * + * // Remove this listener. + * unsubscribe(); + * ``` + */ + onSnapshot(onNext, onError) { + (0, validate_1.validateFunction)('onNext', onNext); + (0, validate_1.validateFunction)('onError', onError, { optional: true }); + const watch = new ((__nccwpck_require__(97462)/* .QueryWatch */ .mI))(this.firestore, this, this._queryOptions.converter); + return watch.onSnapshot((readTime, size, docs, changes) => { + onNext(new query_snapshot_1.QuerySnapshot(this, readTime, size, docs, changes)); + }, onError || console.error); + } + /** + * Returns a function that can be used to sort QueryDocumentSnapshots + * according to the sort criteria of this query. + * + * @private + * @internal + */ + comparator() { + return (doc1, doc2) => { + // Add implicit sorting by name, using the last specified direction. + const lastDirection = this._queryOptions.hasFieldOrders() + ? this._queryOptions.fieldOrders[this._queryOptions.fieldOrders.length - 1].direction + : 'ASCENDING'; + const orderBys = this._queryOptions.fieldOrders.concat(new field_order_1.FieldOrder(index_1.FieldPath.documentId(), lastDirection)); + for (const orderBy of orderBys) { + let comp; + if (index_1.FieldPath.documentId().isEqual(orderBy.field)) { + comp = doc1.ref._path.compareTo(doc2.ref._path); + } + else { + const v1 = doc1.protoField(orderBy.field); + const v2 = doc2.protoField(orderBy.field); + if (v1 === undefined || v2 === undefined) { + throw new Error('Trying to compare documents on fields that ' + + "don't exist. Please include the fields you are ordering on " + + 'in your select() call.'); + } + comp = (0, order_1.compare)(v1, v2); + } + if (comp !== 0) { + const direction = orderBy.direction === 'ASCENDING' ? 1 : -1; + return direction * comp; + } + } + return 0; + }; + } + /** + * Applies a custom data converter to this Query, allowing you to use your + * own custom model objects with Firestore. When you call get() on the + * returned Query, the provided converter will convert between Firestore + * data of type `NewDbModelType` and your custom type `NewAppModelType`. + * + * Using the converter allows you to specify generic type arguments when + * storing and retrieving objects from Firestore. + * + * Passing in `null` as the converter parameter removes the current + * converter. + * + * @example + * ``` + * class Post { + * constructor(readonly title: string, readonly author: string) {} + * + * toString(): string { + * return this.title + ', by ' + this.author; + * } + * } + * + * const postConverter = { + * toFirestore(post: Post): FirebaseFirestore.DocumentData { + * return {title: post.title, author: post.author}; + * }, + * fromFirestore( + * snapshot: FirebaseFirestore.QueryDocumentSnapshot + * ): Post { + * const data = snapshot.data(); + * return new Post(data.title, data.author); + * } + * }; + * + * const postSnap = await Firestore() + * .collection('posts') + * .withConverter(postConverter) + * .doc().get(); + * const post = postSnap.data(); + * if (post !== undefined) { + * post.title; // string + * post.toString(); // Should be defined + * post.someNonExistentProperty; // TS error + * } + * + * ``` + * @param {FirestoreDataConverter | null} converter Converts objects to and + * from Firestore. 
Passing in `null` removes the current converter. + * @return A Query that uses the provided converter. + */ + withConverter(converter) { + return new Query(this.firestore, this._queryOptions.withConverter(converter !== null && converter !== void 0 ? converter : (0, types_2.defaultConverter)())); + } + /** + * Construct the resulting snapshot for this query with given documents. + * + * @private + * @internal + */ + _createSnapshot(readTime, size, docs, changes) { + return new query_snapshot_1.QuerySnapshot(this, readTime, size, docs, changes); + } +} +exports.Query = Query; +//# sourceMappingURL=query.js.map + +/***/ }), + +/***/ 66155: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LimitType = void 0; +/*! + * Denotes whether a provided limit is applied to the beginning or the end of + * the result set. + */ +var LimitType; +(function (LimitType) { + LimitType[LimitType["First"] = 0] = "First"; + LimitType[LimitType["Last"] = 1] = "Last"; +})(LimitType || (exports.LimitType = LimitType = {})); +/** + * onSnapshot() callback that receives a QuerySnapshot. + * + * @callback querySnapshotCallback + * @param {QuerySnapshot} snapshot A query snapshot. + */ +/** + * onSnapshot() callback that receives a DocumentSnapshot. + * + * @callback documentSnapshotCallback + * @param {DocumentSnapshot} snapshot A document snapshot. + */ +/** + * onSnapshot() callback that receives an error. + * + * @callback errorCallback + * @param {Error} err An error from a listen. + */ +//# sourceMappingURL=types.js.map + +/***/ }), + +/***/ 95893: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.VectorQuerySnapshot = void 0; +const validate_1 = __nccwpck_require__(33822); +const util_1 = __nccwpck_require__(15468); +/** + * A `VectorQuerySnapshot` contains zero or more `QueryDocumentSnapshot` objects + * representing the results of a query. The documents can be accessed as an + * array via the `docs` property or enumerated using the `forEach` method. The + * number of documents can be determined via the `empty` and `size` + * properties. 
+ */ +class VectorQuerySnapshot { + /** + * @private + * @internal + * + * @param _query - The originating query. + * @param _readTime - The time when this query snapshot was obtained. + * @param _size - The number of documents in the result set. + * @param docs - A callback returning a sorted array of documents matching + * this query + * @param changes - A callback returning a sorted array of document change + * events for this snapshot. + */ + constructor(_query, _readTime, _size, docs, changes) { + this._query = _query; + this._readTime = _readTime; + this._size = _size; + this._materializedDocs = null; + this._materializedChanges = null; + this._docs = null; + this._changes = null; + this._docs = docs; + this._changes = changes; + } + /** + * The `VectorQuery` on which you called get() in order to get this + * `VectorQuerySnapshot`. + * + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col').where('foo', '==', 'bar'); + * + * query.findNearest("embedding", [0, 0], {limit: 10, distanceMeasure: "EUCLIDEAN"}) + * .get().then(querySnapshot => { + * console.log(`Returned first batch of results`); + * let query = querySnapshot.query; + * return query.offset(10).get(); + * }).then(() => { + * console.log(`Returned second batch of results`); + * }); + * ``` + */ + get query() { + return this._query; + } + /** + * An array of all the documents in this `VectorQuerySnapshot`. + * + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col') + * .findNearest("embedding", [0, 0], {limit: 10, distanceMeasure: "EUCLIDEAN"}); + * + * query.get().then(querySnapshot => { + * let docs = querySnapshot.docs; + * for (let doc of docs) { + * console.log(`Document found at path: ${doc.ref.path}`); + * } + * }); + * ``` + */ + get docs() { + if (this._materializedDocs) { + return this._materializedDocs; + } + this._materializedDocs = this._docs(); + this._docs = null; + return this._materializedDocs; + } + /** + * `true` if there are no documents in the `VectorQuerySnapshot`. + * + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col') + * .findNearest("embedding", [0, 0], {limit: 10, distanceMeasure: "EUCLIDEAN"}); + * + * query.get().then(querySnapshot => { + * if (querySnapshot.empty) { + * console.log('No documents found.'); + * } + * }); + * ``` + */ + get empty() { + return this._size === 0; + } + /** + * The number of documents in the `VectorQuerySnapshot`. + * + * @readonly + * + * @example + * ``` + * let query = firestore.collection('col') + * .findNearest("embedding", [0, 0], {limit: 10, distanceMeasure: "EUCLIDEAN"}); + * + * query.get().then(querySnapshot => { + * console.log(`Found ${querySnapshot.size} documents.`); + * }); + * ``` + */ + get size() { + return this._size; + } + /** + * The time this `VectorQuerySnapshot` was obtained. + * + * @example + * ``` + * let query = firestore.collection('col') + * .findNearest("embedding", [0, 0], {limit: 10, distanceMeasure: "EUCLIDEAN"}); + * + * query.get().then((querySnapshot) => { + * let readTime = querySnapshot.readTime; + * console.log(`Query results returned at '${readTime.toDate()}'`); + * }); + * ``` + */ + get readTime() { + return this._readTime; + } + /** + * Returns an array of the documents changes since the last snapshot. If + * this is the first snapshot, all documents will be in the list as added + * changes. + * + * @returns An array of the documents changes since the last snapshot. 
+ * + * @example + * ``` + * let query = firestore.collection('col') + * .findNearest("embedding", [0, 0], {limit: 10, distanceMeasure: "EUCLIDEAN"}); + * + * query.get().then(querySnapshot => { + * let changes = querySnapshot.docChanges(); + * for (let change of changes) { + * console.log(`A document was ${change.type}.`); + * } + * }); + * ``` + */ + docChanges() { + if (this._materializedChanges) { + return this._materializedChanges; + } + this._materializedChanges = this._changes(); + this._changes = null; + return this._materializedChanges; + } + /** + * Enumerates all of the documents in the `VectorQuerySnapshot`. This is a convenience + * method for running the same callback on each {@link QueryDocumentSnapshot} + * that is returned. + * + * @param callback - A callback to be called with a + * {@link QueryDocumentSnapshot} for each document in + * the snapshot. + * @param thisArg - The `this` binding for the callback.. + * + * @example + * ``` + * let query = firestore.collection('col') + * .findNearest("embedding", [0, 0], {limit: 10, distanceMeasure: "EUCLIDEAN"}); + * + * query.get().then(querySnapshot => { + * querySnapshot.forEach(documentSnapshot => { + * console.log(`Document found at path: ${documentSnapshot.ref.path}`); + * }); + * }); + * ``` + */ + forEach(callback, thisArg) { + (0, validate_1.validateFunction)('callback', callback); + for (const doc of this.docs) { + callback.call(thisArg, doc); + } + } + /** + * Returns true if the document data in this `VectorQuerySnapshot` is equal to the + * provided value. + * + * @param other - The value to compare against. + * @returns true if this `VectorQuerySnapshot` is equal to the provided + * value. + */ + isEqual(other) { + // Since the read time is different on every query read, we explicitly + // ignore all metadata in this comparison. + if (this === other) { + return true; + } + if (!(other instanceof VectorQuerySnapshot)) { + return false; + } + if (this._size !== other._size) { + return false; + } + if (!this._query.isEqual(other._query)) { + return false; + } + if (this._materializedDocs && !this._materializedChanges) { + // If we have only materialized the documents, we compare them first. + return ((0, util_1.isArrayEqual)(this.docs, other.docs) && + (0, util_1.isArrayEqual)(this.docChanges(), other.docChanges())); + } + // Otherwise, we compare the changes first as we expect there to be fewer. + return ((0, util_1.isArrayEqual)(this.docChanges(), other.docChanges()) && + (0, util_1.isArrayEqual)(this.docs, other.docs)); + } +} +exports.VectorQuerySnapshot = VectorQuerySnapshot; +//# sourceMappingURL=vector-query-snapshot.js.map + +/***/ }), + +/***/ 71943: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.VectorQuery = void 0; +const field_value_1 = __nccwpck_require__(16888); +const path_1 = __nccwpck_require__(34908); +const util_1 = __nccwpck_require__(15468); +const query_util_1 = __nccwpck_require__(63379); +const vector_query_snapshot_1 = __nccwpck_require__(95893); +const query_profile_1 = __nccwpck_require__(15453); +/** + * A query that finds the documents whose vector fields are closest to a certain query vector. + * Create an instance of `VectorQuery` with {@link Query.findNearest}. + */ +class VectorQuery { + /** + * @private + * @internal + */ + constructor(_query, _options) { + this._query = _query; + this._options = _options; + this._queryUtil = new query_util_1.QueryUtil(_query._firestore, _query._queryOptions, _query._serializer); + } + /** The query whose results participants in the vector search. Filtering + * performed by the query will apply before the vector search. + **/ + get query() { + return this._query; + } + /** + * @private + * @internal + */ + get _rawVectorField() { + return typeof this._options.vectorField === 'string' + ? this._options.vectorField + : this._options.vectorField.toString(); + } + /** + * @private + * @internal + */ + get _rawDistanceResultField() { + if (typeof this._options.distanceResultField === 'undefined') + return; + return typeof this._options.distanceResultField === 'string' + ? this._options.distanceResultField + : this._options.distanceResultField.toString(); + } + /** + * @private + * @internal + */ + get _rawQueryVector() { + return Array.isArray(this._options.queryVector) + ? this._options.queryVector + : this._options.queryVector.toArray(); + } + /** + * Plans and optionally executes this vector search query. Returns a Promise that will be + * resolved with the planner information, statistics from the query execution (if any), + * and the query results (if any). + * + * @return A Promise that will be resolved with the planner information, statistics + * from the query execution (if any), and the query results (if any). + */ + async explain(options) { + if (options === undefined) { + options = {}; + } + const { result, explainMetrics } = await this._getResponse(options); + if (!explainMetrics) { + throw new Error('No explain results'); + } + return new query_profile_1.ExplainResults(explainMetrics, result || null); + } + /** + * Executes this vector search query. + * + * @returns A promise that will be resolved with the results of the query. + */ + async get() { + const { result } = await this._getResponse(); + if (!result) { + throw new Error('No VectorQuerySnapshot result'); + } + return result; + } + _getResponse(explainOptions) { + return this._queryUtil._getResponse(this, + /*transactionOrReadTime*/ undefined, + // VectorQuery cannot be retried with cursors as they do not support cursors yet. + /*retryWithCursor*/ false, explainOptions); + } + /** + * Internal streaming method that accepts an optional transaction ID. + * + * @param transactionId - A transaction ID. + * @private + * @internal + * @returns A stream of document results. + */ + _stream(transactionId) { + return this._queryUtil._stream(this, transactionId, + /*retryWithCursor*/ false); + } + /** + * Internal method for serializing a query to its proto + * representation with an optional transaction id. + * + * @private + * @internal + * @returns Serialized JSON for the query. 
+ */ + toProto(transactionOrReadTime, explainOptions) { + var _a, _b, _c; + const queryProto = this._query.toProto(transactionOrReadTime); + const queryVector = Array.isArray(this._options.queryVector) + ? new field_value_1.VectorValue(this._options.queryVector) + : this._options.queryVector; + queryProto.structuredQuery.findNearest = { + limit: { value: this._options.limit }, + distanceMeasure: this._options.distanceMeasure, + vectorField: { + fieldPath: path_1.FieldPath.fromArgument(this._options.vectorField) + .formattedName, + }, + queryVector: queryVector._toProto(this._query._serializer), + distanceResultField: ((_a = this._options) === null || _a === void 0 ? void 0 : _a.distanceResultField) + ? path_1.FieldPath.fromArgument(this._options.distanceResultField) + .formattedName + : undefined, + distanceThreshold: ((_b = this._options) === null || _b === void 0 ? void 0 : _b.distanceThreshold) + ? { value: (_c = this._options) === null || _c === void 0 ? void 0 : _c.distanceThreshold } + : undefined, + }; + if (explainOptions) { + queryProto.explainOptions = explainOptions; + } + return queryProto; + } + /** + * Construct the resulting vector snapshot for this query with given documents. + * + * @private + * @internal + */ + _createSnapshot(readTime, size, docs, changes) { + return new vector_query_snapshot_1.VectorQuerySnapshot(this, readTime, size, docs, changes); + } + /** + * Construct a new vector query whose result will start after To support stream(). + * This now throws an exception because cursors are not supported from the backend for vector queries yet. + * + * @private + * @internal + * @returns Serialized JSON for the query. + */ + startAfter( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + ...fieldValuesOrDocumentSnapshot) { + throw new Error('Unimplemented: Vector query does not support cursors yet.'); + } + /** + * Compares this object with the given object for equality. + * + * This object is considered "equal" to the other object if and only if + * `other` performs the same vector distance search as this `VectorQuery` and + * the underlying Query of `other` compares equal to that of this object + * using `Query.isEqual()`. + * + * @param other - The object to compare to this object for equality. + * @returns `true` if this object is "equal" to the given object, as + * defined above, or `false` otherwise. + */ + isEqual(other) { + if (this === other) { + return true; + } + if (!(other instanceof VectorQuery)) { + return false; + } + if (!this.query.isEqual(other.query)) { + return false; + } + return (this._rawVectorField === other._rawVectorField && + (0, util_1.isPrimitiveArrayEqual)(this._rawQueryVector, other._rawQueryVector) && + this._options.limit === other._options.limit && + this._options.distanceMeasure === other._options.distanceMeasure && + this._options.distanceThreshold === other._options.distanceThreshold && + this._rawDistanceResultField === other._rawDistanceResultField); + } +} +exports.VectorQuery = VectorQuery; +//# sourceMappingURL=vector-query.js.map + +/***/ }), + +/***/ 49170: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Serializer = void 0; +exports.validateUserInput = validateUserInput; +const field_value_1 = __nccwpck_require__(16888); +const convert_1 = __nccwpck_require__(36674); +const geo_point_1 = __nccwpck_require__(98854); +const index_1 = __nccwpck_require__(32210); +const path_1 = __nccwpck_require__(34908); +const timestamp_1 = __nccwpck_require__(29061); +const util_1 = __nccwpck_require__(15468); +const validate_1 = __nccwpck_require__(33822); +const map_type_1 = __nccwpck_require__(16723); +/** + * The maximum depth of a Firestore object. + * + * @private + * @internal + */ +const MAX_DEPTH = 20; +/** + * Serializer that is used to convert between JavaScript types and their + * Firestore Protobuf representation. + * + * @private + * @internal + */ +class Serializer { + constructor(firestore) { + // Instead of storing the `firestore` object, we store just a reference to + // its `.doc()` method. This avoid a circular reference, which breaks + // JSON.stringify(). + this.createReference = path => firestore.doc(path); + this.createInteger = n => firestore._settings.useBigInt ? BigInt(n) : Number(n); + this.allowUndefined = !!firestore._settings.ignoreUndefinedProperties; + } + /** + * Encodes a JavaScript object into the Firestore 'Fields' representation. + * + * @private + * @internal + * @param obj The object to encode. + * @returns The Firestore 'Fields' representation + */ + encodeFields(obj) { + const fields = {}; + for (const prop of Object.keys(obj)) { + const val = this.encodeValue(obj[prop]); + if (val) { + fields[prop] = val; + } + } + return fields; + } + /** + * Encodes a JavaScript value into the Firestore 'Value' representation. + * + * @private + * @internal + * @param val The object to encode + * @returns The Firestore Proto or null if we are deleting a field. 
+ */ + encodeValue(val) { + if (val instanceof field_value_1.FieldTransform) { + return null; + } + if (typeof val === 'string') { + return { + stringValue: val, + }; + } + if (typeof val === 'boolean') { + return { + booleanValue: val, + }; + } + if (typeof val === 'number') { + const isNegativeZero = val === 0 && 1 / val === 1 / -0; + if (Number.isSafeInteger(val) && !isNegativeZero) { + return { + integerValue: val, + }; + } + else { + return { + doubleValue: val, + }; + } + } + if (typeof val === 'bigint') { + return { + integerValue: val.toString(), + }; + } + if (val instanceof Date) { + const timestamp = timestamp_1.Timestamp.fromDate(val); + return { + timestampValue: { + seconds: timestamp.seconds, + nanos: timestamp.nanoseconds, + }, + }; + } + if (isMomentJsType(val)) { + const timestamp = timestamp_1.Timestamp.fromDate(val.toDate()); + return { + timestampValue: { + seconds: timestamp.seconds, + nanos: timestamp.nanoseconds, + }, + }; + } + if (val === null) { + return { + nullValue: 'NULL_VALUE', + }; + } + if (val instanceof Buffer || val instanceof Uint8Array) { + return { + bytesValue: val, + }; + } + if (val instanceof field_value_1.VectorValue) { + return val._toProto(this); + } + if ((0, util_1.isObject)(val)) { + const toProto = val['toProto']; + if (typeof toProto === 'function') { + return toProto.bind(val)(); + } + } + if (Array.isArray(val)) { + const array = { + arrayValue: {}, + }; + if (val.length > 0) { + array.arrayValue.values = []; + for (let i = 0; i < val.length; ++i) { + const enc = this.encodeValue(val[i]); + if (enc) { + array.arrayValue.values.push(enc); + } + } + } + return array; + } + if (typeof val === 'object' && (0, util_1.isPlainObject)(val)) { + const map = { + mapValue: {}, + }; + // If we encounter an empty object, we always need to send it to make sure + // the server creates a map entry. + if (!(0, util_1.isEmpty)(val)) { + map.mapValue.fields = this.encodeFields(val); + if ((0, util_1.isEmpty)(map.mapValue.fields)) { + return null; + } + } + return map; + } + if (val === undefined && this.allowUndefined) { + return null; + } + throw new Error(`Cannot encode value: ${val}`); + } + /** + * @private + */ + encodeVector(rawVector) { + // A Firestore Vector is a map with reserved key/value pairs. + return { + mapValue: { + fields: { + [map_type_1.RESERVED_MAP_KEY]: { + stringValue: map_type_1.RESERVED_MAP_KEY_VECTOR_VALUE, + }, + [map_type_1.VECTOR_MAP_VECTORS_KEY]: { + arrayValue: { + values: rawVector.map(value => { + return { + doubleValue: value, + }; + }), + }, + }, + }, + }, + }; + } + /** + * Decodes a single Firestore 'Value' Protobuf. + * + * @private + * @internal + * @param proto A Firestore 'Value' Protobuf. + * @returns The converted JS type. 
+ */ + decodeValue(proto) { + const valueType = (0, convert_1.detectValueType)(proto); + switch (valueType) { + case 'stringValue': { + return proto.stringValue; + } + case 'booleanValue': { + return proto.booleanValue; + } + case 'integerValue': { + return this.createInteger(proto.integerValue); + } + case 'doubleValue': { + return proto.doubleValue; + } + case 'timestampValue': { + return timestamp_1.Timestamp.fromProto(proto.timestampValue); + } + case 'referenceValue': { + const resourcePath = path_1.QualifiedResourcePath.fromSlashSeparatedString(proto.referenceValue); + return this.createReference(resourcePath.relativeName); + } + case 'arrayValue': { + const array = []; + if (Array.isArray(proto.arrayValue.values)) { + for (const value of proto.arrayValue.values) { + array.push(this.decodeValue(value)); + } + } + return array; + } + case 'nullValue': { + return null; + } + case 'mapValue': { + const fields = proto.mapValue.fields; + if (fields) { + const obj = {}; + for (const prop of Object.keys(fields)) { + obj[prop] = this.decodeValue(fields[prop]); + } + return obj; + } + else { + return {}; + } + } + case 'vectorValue': { + const fields = proto.mapValue.fields; + return field_value_1.VectorValue._fromProto(fields[map_type_1.VECTOR_MAP_VECTORS_KEY]); + } + case 'geoPointValue': { + return geo_point_1.GeoPoint.fromProto(proto.geoPointValue); + } + case 'bytesValue': { + return proto.bytesValue; + } + default: { + throw new Error('Cannot decode type from Firestore Value: ' + JSON.stringify(proto)); + } + } + } + /** + * Decodes a google.protobuf.Value + * + * @private + * @internal + * @param proto A Google Protobuf 'Value'. + * @returns The converted JS type. + */ + decodeGoogleProtobufValue(proto) { + switch ((0, convert_1.detectGoogleProtobufValueType)(proto)) { + case 'nullValue': { + return null; + } + case 'numberValue': { + return proto.numberValue; + } + case 'stringValue': { + return proto.stringValue; + } + case 'boolValue': { + return proto.boolValue; + } + case 'listValue': { + return this.decodeGoogleProtobufList(proto.listValue); + } + case 'structValue': { + return this.decodeGoogleProtobufStruct(proto.structValue); + } + default: { + throw new Error('Cannot decode type from google.protobuf.Value: ' + + JSON.stringify(proto)); + } + } + } + /** + * Decodes a google.protobuf.ListValue + * + * @private + * @internal + * @param proto A Google Protobuf 'ListValue'. + * @returns The converted JS type. + */ + decodeGoogleProtobufList(proto) { + const result = []; + if (proto && proto.values && Array.isArray(proto.values)) { + for (const value of proto.values) { + result.push(this.decodeGoogleProtobufValue(value)); + } + } + return result; + } + /** + * Decodes a google.protobuf.Struct + * + * @private + * @internal + * @param proto A Google Protobuf 'Struct'. + * @returns The converted JS type. + */ + decodeGoogleProtobufStruct(proto) { + const result = {}; + if (proto && proto.fields) { + for (const prop of Object.keys(proto.fields)) { + result[prop] = this.decodeGoogleProtobufValue(proto.fields[prop]); + } + } + return result; + } +} +exports.Serializer = Serializer; +/** + * Validates a JavaScript value for usage as a Firestore value. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value JavaScript value to validate. + * @param desc A description of the expected type. + * @param path The field path to validate. + * @param options Validation options + * @param level The current depth of the traversal. 
This is used to decide + * whether undefined values or deletes are allowed. + * @param inArray Whether we are inside an array. + * @throws when the object is invalid. + */ +function validateUserInput(arg, value, desc, options, path, level, inArray) { + if (path && path.size - 1 > MAX_DEPTH) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, desc)} Input object is deeper than ${MAX_DEPTH} levels or contains a cycle.`); + } + level = level || 0; + inArray = inArray || false; + const fieldPathMessage = path ? ` (found in field "${path}")` : ''; + if (Array.isArray(value)) { + for (let i = 0; i < value.length; ++i) { + validateUserInput(arg, value[i], desc, options, path ? path.append(String(i)) : new path_1.FieldPath(String(i)), level + 1, + /* inArray= */ true); + } + } + else if ((0, util_1.isPlainObject)(value)) { + for (const prop of Object.keys(value)) { + validateUserInput(arg, value[prop], desc, options, path ? path.append(new path_1.FieldPath(prop)) : new path_1.FieldPath(prop), level + 1, inArray); + } + } + else if (value === undefined) { + if (options.allowUndefined && level === 0) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, desc)} "undefined" values are only ignored inside of objects.`); + } + else if (!options.allowUndefined) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, desc)} Cannot use "undefined" as a Firestore value${fieldPathMessage}. ` + + 'If you want to ignore undefined values, enable `ignoreUndefinedProperties`.'); + } + } + else if (value instanceof field_value_1.VectorValue) { + // OK + } + else if (value instanceof field_value_1.DeleteTransform) { + if (inArray) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, desc)} ${value.methodName}() cannot be used inside of an array${fieldPathMessage}.`); + } + else if (options.allowDeletes === 'none') { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, desc)} ${value.methodName}() must appear at the top-level and can only be used in update() ` + + `or set() with {merge:true}${fieldPathMessage}.`); + } + else if (options.allowDeletes === 'root') { + if (level === 0) { + // Ok (update() with UpdateData). + } + else if (level === 1 && (path === null || path === void 0 ? void 0 : path.size) === 1) { + // Ok (update with varargs). + } + else { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, desc)} ${value.methodName}() must appear at the top-level and can only be used in update() ` + + `or set() with {merge:true}${fieldPathMessage}.`); + } + } + } + else if (value instanceof field_value_1.FieldTransform) { + if (inArray) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, desc)} ${value.methodName}() cannot be used inside of an array${fieldPathMessage}.`); + } + else if (!options.allowTransforms) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, desc)} ${value.methodName}() can only be used in set(), create() or update()${fieldPathMessage}.`); + } + } + else if (value instanceof path_1.FieldPath) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, desc)} Cannot use object of type "FieldPath" as a Firestore value${fieldPathMessage}.`); + } + else if (value instanceof index_1.DocumentReference) { + // Ok. + } + else if (value instanceof geo_point_1.GeoPoint) { + // Ok. + } + else if (value instanceof timestamp_1.Timestamp || value instanceof Date) { + // Ok. + } + else if (isMomentJsType(value)) { + // Ok. 
+ } + else if (value instanceof Buffer || value instanceof Uint8Array) { + // Ok. + } + else if (value === null) { + // Ok. + } + else if (typeof value === 'object') { + throw new Error((0, validate_1.customObjectMessage)(arg, value, path)); + } +} +/** + * Returns true if value is a MomentJs date object. + * @private + * @internal + */ +function isMomentJsType(value) { + return (typeof value === 'object' && + value !== null && + value.constructor && + value.constructor.name === 'Moment' && + typeof value.toDate === 'function'); +} +//# sourceMappingURL=serializer.js.map + +/***/ }), + +/***/ 28083: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DisabledTraceUtil = void 0; +const span_1 = __nccwpck_require__(64190); +class DisabledTraceUtil { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + startSpan(name) { + return new span_1.Span(); + } + startActiveSpan( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + name, fn, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + attributes) { + const emptySpan = new span_1.Span(); + return fn(emptySpan); + } + currentSpan() { + return new span_1.Span(); + } +} +exports.DisabledTraceUtil = DisabledTraceUtil; +//# sourceMappingURL=disabled-trace-util.js.map + +/***/ }), + +/***/ 25111: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EnabledTraceUtil = void 0; +const api_1 = __nccwpck_require__(65163); +const span_1 = __nccwpck_require__(64190); +class EnabledTraceUtil { + constructor(settings) { + var _a; + let tracerProvider = (_a = settings.openTelemetryOptions) === null || _a === void 0 ? void 0 : _a.tracerProvider; + // If a TracerProvider has not been given to us, we try to use the global one. + if (!tracerProvider) { + const { trace } = __nccwpck_require__(65163); + tracerProvider = trace.getTracerProvider(); + } + const libVersion = (__nccwpck_require__(49830)/* .version */ .i8); + const libName = (__nccwpck_require__(49830)/* .name */ .u2); + this.tracer = tracerProvider.getTracer(libName, libVersion); + } + endSpan(otelSpan, error) { + otelSpan.setStatus({ + code: api_1.SpanStatusCode.ERROR, + message: error.message, + }); + otelSpan.recordException(error); + otelSpan.end(); + } + startActiveSpan(name, fn, attributes) { + return this.tracer.startActiveSpan(name, { + attributes: attributes, + }, (otelSpan) => { + // Note that if `fn` returns a `Promise`, we want the otelSpan to end + // after the `Promise` has resolved, NOT after the `fn` has returned. + // Therefore, we should not use a `finally` clause to end the otelSpan. 
+ try { + let result = fn(new span_1.Span(otelSpan)); + if (result instanceof Promise) { + result = result + .then(value => { + otelSpan.end(); + return value; + }) + .catch(error => { + this.endSpan(otelSpan, error); + // Returns a Promise.reject the same as the underlying function. + return Promise.reject(error); + }); + } + else { + otelSpan.end(); + } + return result; + } + catch (error) { + this.endSpan(otelSpan, error); + // Re-throw the exception to maintain normal error handling. + throw error; + } + }); + } + startSpan(name) { + return new span_1.Span(this.tracer.startSpan(name, undefined, api_1.context.active())); + } + currentSpan() { + return new span_1.Span(api_1.trace.getActiveSpan()); + } +} +exports.EnabledTraceUtil = EnabledTraceUtil; +//# sourceMappingURL=enabled-trace-util.js.map + +/***/ }), + +/***/ 64190: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Span = void 0; +class Span { + constructor(span) { + this.span = span; + } + end() { + var _a; + (_a = this.span) === null || _a === void 0 ? void 0 : _a.end(); + } + addEvent(name, attributes) { + var _a; + this.span = (_a = this.span) === null || _a === void 0 ? void 0 : _a.addEvent(name, attributes); + return this; + } + setAttributes(attributes) { + var _a; + this.span = (_a = this.span) === null || _a === void 0 ? void 0 : _a.setAttributes(attributes); + return this; + } +} +exports.Span = Span; +//# sourceMappingURL=span.js.map + +/***/ }), + +/***/ 2693: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ATTRIBUTE_KEY_ATTEMPTS_REMAINING = exports.ATTRIBUTE_KEY_ATTEMPTS_ALLOWED = exports.ATTRIBUTE_KEY_TRANSACTION_TYPE = exports.ATTRIBUTE_KEY_IS_RETRY_WITH_CURSOR = exports.ATTRIBUTE_KEY_NUM_RESPONSES = exports.ATTRIBUTE_KEY_IS_TRANSACTIONAL = exports.ATTRIBUTE_KEY_DOC_COUNT = exports.SPAN_NAME_BULK_WRITER_COMMIT = exports.SPAN_NAME_PARTITION_QUERY = exports.SPAN_NAME_BATCH_COMMIT = exports.SPAN_NAME_TRANSACTION_COMMIT = exports.SPAN_NAME_TRANSACTION_ROLLBACK = exports.SPAN_NAME_TRANSACTION_GET_DOCUMENTS = exports.SPAN_NAME_TRANSACTION_GET_DOCUMENT = exports.SPAN_NAME_TRANSACTION_GET_AGGREGATION_QUERY = exports.SPAN_NAME_TRANSACTION_GET_QUERY = exports.SPAN_NAME_TRANSACTION_RUN = exports.SPAN_NAME_AGGREGATION_QUERY_GET = exports.SPAN_NAME_QUERY_GET = exports.SPAN_NAME_COL_REF_LIST_DOCUMENTS = exports.SPAN_NAME_COL_REF_ADD = exports.SPAN_NAME_DOC_REF_LIST_COLLECTIONS = exports.SPAN_NAME_DOC_REF_GET = exports.SPAN_NAME_DOC_REF_DELETE = exports.SPAN_NAME_DOC_REF_UPDATE = exports.SPAN_NAME_DOC_REF_SET = exports.SPAN_NAME_DOC_REF_CREATE = exports.SPAN_NAME_RUN_AGGREGATION_QUERY = exports.SPAN_NAME_RUN_QUERY = exports.SPAN_NAME_BATCH_GET_DOCUMENTS = exports.SERVICE = void 0; +/** + * Span names for instrumented operations. + */ +exports.SERVICE = 'google.firestore.v1.Firestore/'; +exports.SPAN_NAME_BATCH_GET_DOCUMENTS = 'BatchGetDocuments'; +exports.SPAN_NAME_RUN_QUERY = 'RunQuery'; +exports.SPAN_NAME_RUN_AGGREGATION_QUERY = 'RunAggregationQuery'; +exports.SPAN_NAME_DOC_REF_CREATE = 'DocumentReference.Create'; +exports.SPAN_NAME_DOC_REF_SET = 'DocumentReference.Set'; +exports.SPAN_NAME_DOC_REF_UPDATE = 'DocumentReference.Update'; +exports.SPAN_NAME_DOC_REF_DELETE = 'DocumentReference.Delete'; +exports.SPAN_NAME_DOC_REF_GET = 'DocumentReference.Get'; +exports.SPAN_NAME_DOC_REF_LIST_COLLECTIONS = 'DocumentReference.ListCollections'; +exports.SPAN_NAME_COL_REF_ADD = 'CollectionReference.Add'; +exports.SPAN_NAME_COL_REF_LIST_DOCUMENTS = 'CollectionReference.ListDocuments'; +exports.SPAN_NAME_QUERY_GET = 'Query.Get'; +exports.SPAN_NAME_AGGREGATION_QUERY_GET = 'AggregationQuery.Get'; +exports.SPAN_NAME_TRANSACTION_RUN = 'Transaction.Run'; +exports.SPAN_NAME_TRANSACTION_GET_QUERY = 'Transaction.Get.Query'; +exports.SPAN_NAME_TRANSACTION_GET_AGGREGATION_QUERY = 'Transaction.Get.AggregationQuery'; +exports.SPAN_NAME_TRANSACTION_GET_DOCUMENT = 'Transaction.Get.Document'; +exports.SPAN_NAME_TRANSACTION_GET_DOCUMENTS = 'Transaction.Get.Documents'; +exports.SPAN_NAME_TRANSACTION_ROLLBACK = 'Transaction.Rollback'; +exports.SPAN_NAME_TRANSACTION_COMMIT = 'Transaction.Commit'; +exports.SPAN_NAME_BATCH_COMMIT = 'Batch.Commit'; +exports.SPAN_NAME_PARTITION_QUERY = 'PartitionQuery'; +exports.SPAN_NAME_BULK_WRITER_COMMIT = 'BulkWriter.Commit'; +exports.ATTRIBUTE_KEY_DOC_COUNT = 'doc_count'; +exports.ATTRIBUTE_KEY_IS_TRANSACTIONAL = 'transactional'; +exports.ATTRIBUTE_KEY_NUM_RESPONSES = 'response_count'; +exports.ATTRIBUTE_KEY_IS_RETRY_WITH_CURSOR = 'retry_query_with_cursor'; +exports.ATTRIBUTE_KEY_TRANSACTION_TYPE = 'transaction_type'; +exports.ATTRIBUTE_KEY_ATTEMPTS_ALLOWED = 'attempts_allowed'; +exports.ATTRIBUTE_KEY_ATTEMPTS_REMAINING = 'attempts_remaining'; +//# sourceMappingURL=trace-util.js.map + +/***/ }), + +/***/ 29061: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2018 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Timestamp = void 0; +const validate_1 = __nccwpck_require__(33822); +/*! + * Number of nanoseconds in a millisecond. + * + * @type {number} + */ +const MS_TO_NANOS = 1000000; +/*! + * The minimum legal value for the "seconds" property of a Timestamp object. + * + * This value corresponds to 0001-01-01T00:00:00Z. + * + * @type {number} + */ +const MIN_SECONDS = -62135596800; +/*! + * The maximum legal value for the "seconds" property of a Timestamp object. + * + * This value corresponds to 9999-12-31T23:59:59.999999999Z. + * + * @type {number} + */ +const MAX_SECONDS = 253402300799; +/** + * A Timestamp represents a point in time independent of any time zone or + * calendar, represented as seconds and fractions of seconds at nanosecond + * resolution in UTC Epoch time. It is encoded using the Proleptic Gregorian + * Calendar which extends the Gregorian calendar backwards to year one. It is + * encoded assuming all minutes are 60 seconds long, i.e. leap seconds are + * "smeared" so that no leap second table is needed for interpretation. Range is + * from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. + * + * @see https://github.com/google/protobuf/blob/master/src/google/protobuf/timestamp.proto + */ +class Timestamp { + /** + * Creates a new timestamp with the current date, with millisecond precision. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.set({ updateTime:Firestore.Timestamp.now() }); + * + * ``` + * @return {Timestamp} A new `Timestamp` representing the current date. + */ + static now() { + return Timestamp.fromMillis(Date.now()); + } + /** + * Creates a new timestamp from the given date. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * let date = Date.parse('01 Jan 2000 00:00:00 GMT'); + * documentRef.set({ startTime:Firestore.Timestamp.fromDate(date) }); + * + * ``` + * @param {Date} date The date to initialize the `Timestamp` from. + * @return {Timestamp} A new `Timestamp` representing the same point in time + * as the given date. + */ + static fromDate(date) { + return Timestamp.fromMillis(date.getTime()); + } + /** + * Creates a new timestamp from the given number of milliseconds. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.set({ startTime:Firestore.Timestamp.fromMillis(42) }); + * + * ``` + * @param {number} milliseconds Number of milliseconds since Unix epoch + * 1970-01-01T00:00:00Z. + * @return {Timestamp} A new `Timestamp` representing the same point in time + * as the given number of milliseconds. + */ + static fromMillis(milliseconds) { + const seconds = Math.floor(milliseconds / 1000); + const nanos = Math.floor((milliseconds - seconds * 1000) * MS_TO_NANOS); + return new Timestamp(seconds, nanos); + } + /** + * Generates a `Timestamp` object from a Timestamp proto. 
+ * + * @private + * @internal + * @param {Object} timestamp The `Timestamp` Protobuf object. + */ + static fromProto(timestamp) { + return new Timestamp(Number(timestamp.seconds || 0), timestamp.nanos || 0); + } + /** + * Creates a new timestamp. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.set({ startTime:new Firestore.Timestamp(42, 0) }); + * + * ``` + * @param {number} seconds The number of seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + * @param {number} nanoseconds The non-negative fractions of a second at + * nanosecond resolution. Negative second values with fractions must still + * have non-negative nanoseconds values that count forward in time. Must be + * from 0 to 999,999,999 inclusive. + */ + constructor(seconds, nanoseconds) { + (0, validate_1.validateInteger)('seconds', seconds, { + minValue: MIN_SECONDS, + maxValue: MAX_SECONDS, + }); + (0, validate_1.validateInteger)('nanoseconds', nanoseconds, { + minValue: 0, + maxValue: 999999999, + }); + this._seconds = seconds; + this._nanoseconds = nanoseconds; + } + /** + * The number of seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(snap => { + * let updated = snap.updateTime; + * console.log(`Updated at ${updated.seconds}s ${updated.nanoseconds}ns`); + * }); + * + * ``` + * @type {number} + */ + get seconds() { + return this._seconds; + } + /** + * The non-negative fractions of a second at nanosecond resolution. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(snap => { + * let updated = snap.updateTime; + * console.log(`Updated at ${updated.seconds}s ${updated.nanoseconds}ns`); + * }); + * + * ``` + * @type {number} + */ + get nanoseconds() { + return this._nanoseconds; + } + /** + * Returns a new `Date` corresponding to this timestamp. This may lose + * precision. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(snap => { + * console.log(`Document updated at: ${snap.updateTime.toDate()}`); + * }); + * + * ``` + * @return {Date} JavaScript `Date` object representing the same point in time + * as this `Timestamp`, with millisecond precision. + */ + toDate() { + return new Date(this._seconds * 1000 + Math.round(this._nanoseconds / MS_TO_NANOS)); + } + /** + * Returns the number of milliseconds since Unix epoch 1970-01-01T00:00:00Z. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(snap => { + * let startTime = snap.get('startTime'); + * let endTime = snap.get('endTime'); + * console.log(`Duration: ${endTime - startTime}`); + * }); + * + * ``` + * @return {number} The point in time corresponding to this timestamp, + * represented as the number of milliseconds since Unix epoch + * 1970-01-01T00:00:00Z. + */ + toMillis() { + return this._seconds * 1000 + Math.floor(this._nanoseconds / MS_TO_NANOS); + } + /** + * Returns 'true' if this `Timestamp` is equal to the provided one. + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.get().then(snap => { + * if (snap.createTime.isEqual(snap.updateTime)) { + * console.log('Document is in its initial state.'); + * } + * }); + * + * ``` + * @param {any} other The `Timestamp` to compare against. 
+ * @return {boolean} 'true' if this `Timestamp` is equal to the provided one. + */ + isEqual(other) { + return (this === other || + (other instanceof Timestamp && + this._seconds === other.seconds && + this._nanoseconds === other.nanoseconds)); + } + /** + * Generates the Protobuf `Timestamp` object for this timestamp. + * + * @private + * @internal + * @returns {Object} The `Timestamp` Protobuf object. + */ + toProto() { + const timestamp = {}; + if (this.seconds) { + timestamp.seconds = this.seconds.toString(); + } + if (this.nanoseconds) { + timestamp.nanos = this.nanoseconds; + } + return { timestampValue: timestamp }; + } + /** + * Converts this object to a primitive `string`, which allows `Timestamp` objects to be compared + * using the `>`, `<=`, `>=` and `>` operators. + * + * @return {string} a string encoding of this object. + */ + valueOf() { + // This method returns a string of the form . where is + // translated to have a non-negative value and both and are left-padded + // with zeroes to be a consistent length. Strings with this format then have a lexicographical + // ordering that matches the expected ordering. The translation is done to avoid + // having a leading negative sign (i.e. a leading '-' character) in its string representation, + // which would affect its lexicographical ordering. + const adjustedSeconds = this.seconds - MIN_SECONDS; + // Note: Up to 12 decimal digits are required to represent all valid 'seconds' values. + const formattedSeconds = String(adjustedSeconds).padStart(12, '0'); + const formattedNanoseconds = String(this.nanoseconds).padStart(9, '0'); + return formattedSeconds + '.' + formattedNanoseconds; + } +} +exports.Timestamp = Timestamp; +//# sourceMappingURL=timestamp.js.map + +/***/ }), + +/***/ 95382: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2017 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Transaction = void 0; +exports.parseGetAllArguments = parseGetAllArguments; +const backoff_1 = __nccwpck_require__(63544); +const index_1 = __nccwpck_require__(32210); +const logger_1 = __nccwpck_require__(42718); +const path_1 = __nccwpck_require__(34908); +const aggregate_query_1 = __nccwpck_require__(8763); +const document_reference_1 = __nccwpck_require__(502); +const query_1 = __nccwpck_require__(38621); +const helpers_1 = __nccwpck_require__(13823); +const util_1 = __nccwpck_require__(15468); +const validate_1 = __nccwpck_require__(33822); +const document_reader_1 = __nccwpck_require__(81080); +const trace_util_1 = __nccwpck_require__(2693); +/*! + * Error message for transactional reads that were executed after performing + * writes. 
+ */ +const READ_AFTER_WRITE_ERROR_MSG = 'Firestore transactions require all reads to be executed before all writes.'; +const READ_ONLY_WRITE_ERROR_MSG = 'Firestore read-only transactions cannot execute writes.'; +/** + * A reference to a transaction. + * + * The Transaction object passed to a transaction's updateFunction provides + * the methods to read and write data within the transaction context. See + * [runTransaction()]{@link Firestore#runTransaction}. + * + * @class Transaction + */ +class Transaction { + /** + * @private + * + * @param firestore The Firestore Database client. + * @param requestTag A unique client-assigned identifier for the scope of + * this transaction. + * @param transactionOptions The user-defined options for this transaction. + */ + constructor(firestore, requestTag, transactionOptions) { + this._maxAttempts = index_1.DEFAULT_MAX_TRANSACTION_ATTEMPTS; + this._firestore = firestore; + this._requestTag = requestTag; + if (transactionOptions === null || transactionOptions === void 0 ? void 0 : transactionOptions.readOnly) { + // Avoid initialising write batch and backoff unnecessarily for read-only transactions + this._maxAttempts = 1; + this._readOnlyReadTime = transactionOptions.readTime; + } + else { + this._maxAttempts = + (transactionOptions === null || transactionOptions === void 0 ? void 0 : transactionOptions.maxAttempts) || index_1.DEFAULT_MAX_TRANSACTION_ATTEMPTS; + this._writeBatch = firestore.batch(); + this._backoff = new backoff_1.ExponentialBackoff(); + } + } + /** + * Retrieve a document or a query result from the database. Holds a + * pessimistic lock on all returned documents. + * + * @param {DocumentReference|Query} refOrQuery The document or query to + * return. + * @returns {Promise} A Promise that resolves with a DocumentSnapshot or + * QuerySnapshot for the returned documents. + * + * @example + * ``` + * firestore.runTransaction(transaction => { + * let documentRef = firestore.doc('col/doc'); + * return transaction.get(documentRef).then(doc => { + * if (doc.exists) { + * transaction.update(documentRef, { count: doc.get('count') + 1 }); + * } else { + * transaction.create(documentRef, { count: 1 }); + * } + * }); + * }); + * ``` + */ + get(refOrQuery) { + if (this._writeBatch && !this._writeBatch.isEmpty) { + throw new Error(READ_AFTER_WRITE_ERROR_MSG); + } + if (refOrQuery instanceof document_reference_1.DocumentReference) { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_TRANSACTION_GET_DOCUMENT, () => { + return this.withLazyStartedTransaction(refOrQuery, this.getSingleFn); + }); + } + if (refOrQuery instanceof query_1.Query || refOrQuery instanceof aggregate_query_1.AggregateQuery) { + return this._firestore._traceUtil.startActiveSpan(refOrQuery instanceof query_1.Query + ? trace_util_1.SPAN_NAME_TRANSACTION_GET_QUERY + : trace_util_1.SPAN_NAME_TRANSACTION_GET_AGGREGATION_QUERY, () => { + return this.withLazyStartedTransaction(refOrQuery, this.getQueryFn); + }); + } + throw new Error('Value for argument "refOrQuery" must be a DocumentReference, Query, or AggregateQuery.'); + } + /** + * Retrieves multiple documents from Firestore. Holds a pessimistic lock on + * all returned documents. + * + * The first argument is required and must be of type `DocumentReference` + * followed by any additional `DocumentReference` documents. If used, the + * optional `ReadOptions` must be the last argument. 
+ * + * @param {...DocumentReference|ReadOptions} documentRefsOrReadOptions The + * `DocumentReferences` to receive, followed by an optional field mask. + * @returns {Promise>} A Promise that + * contains an array with the resulting document snapshots. + * + * @example + * ``` + * let firstDoc = firestore.doc('col/doc1'); + * let secondDoc = firestore.doc('col/doc2'); + * let resultDoc = firestore.doc('col/doc3'); + * + * firestore.runTransaction(transaction => { + * return transaction.getAll(firstDoc, secondDoc).then(docs => { + * transaction.set(resultDoc, { + * sum: docs[0].get('count') + docs[1].get('count') + * }); + * }); + * }); + * ``` + */ + getAll(...documentRefsOrReadOptions) { + if (this._writeBatch && !this._writeBatch.isEmpty) { + throw new Error(READ_AFTER_WRITE_ERROR_MSG); + } + (0, validate_1.validateMinNumberOfArguments)('Transaction.getAll', documentRefsOrReadOptions, 1); + return this.withLazyStartedTransaction(parseGetAllArguments(documentRefsOrReadOptions), this.getBatchFn); + } + /** + * Create the document referred to by the provided + * [DocumentReference]{@link DocumentReference}. The operation will + * fail the transaction if a document exists at the specified location. + * + * @param {DocumentReference} documentRef A reference to the document to be + * created. + * @param {DocumentData} data The object data to serialize as the document. + * @returns {Transaction} This Transaction instance. Used for + * chaining method calls. + * + * @example + * ``` + * firestore.runTransaction(transaction => { + * let documentRef = firestore.doc('col/doc'); + * return transaction.get(documentRef).then(doc => { + * if (!doc.exists) { + * transaction.create(documentRef, { foo: 'bar' }); + * } + * }); + * }); + * ``` + */ + create(documentRef, data) { + if (!this._writeBatch) { + throw new Error(READ_ONLY_WRITE_ERROR_MSG); + } + this._writeBatch.create(documentRef, data); + return this; + } + /** + * Writes to the document referred to by the provided + * [DocumentReference]{@link DocumentReference}. If the document + * does not exist yet, it will be created. If you pass + * [SetOptions]{@link SetOptions}, the provided data can be merged into the + * existing document. + * + * @param {DocumentReference} documentRef A reference to the document to be + * set. + * @param {T|Partial} data The object to serialize as the document. + * @param {SetOptions=} options An object to configure the set behavior. + * @param {boolean=} options.merge - If true, set() merges the values + * specified in its data argument. Fields omitted from this set() call remain + * untouched. If your input sets any field to an empty map, all nested fields + * are overwritten. + * @param {Array.=} options.mergeFields - If provided, + * set() only replaces the specified field paths. Any field path that is not + * specified is ignored and remains untouched. If your input sets any field to + * an empty map, all nested fields are overwritten. + * @throws {Error} If the provided input is not a valid Firestore document. + * @returns {Transaction} This Transaction instance. Used for + * chaining method calls. 
+ * + * @example + * ``` + * firestore.runTransaction(transaction => { + * let documentRef = firestore.doc('col/doc'); + * transaction.set(documentRef, { foo: 'bar' }); + * return Promise.resolve(); + * }); + * ``` + */ + set(documentRef, data, options) { + if (!this._writeBatch) { + throw new Error(READ_ONLY_WRITE_ERROR_MSG); + } + if (options) { + this._writeBatch.set(documentRef, data, options); + } + else { + this._writeBatch.set(documentRef, data); + } + return this; + } + /** + * Updates fields in the document referred to by the provided + * [DocumentReference]{@link DocumentReference}. The update will + * fail if applied to a document that does not exist. + * + * The update() method accepts either an object with field paths encoded as + * keys and field values encoded as values, or a variable number of arguments + * that alternate between field paths and field values. Nested fields can be + * updated by providing dot-separated field path strings or by providing + * FieldPath objects. + * + * A Precondition restricting this update can be specified as the last + * argument. + * + * @param {DocumentReference} documentRef A reference to the document to be + * updated. + * @param {UpdateData|string|FieldPath} dataOrField An object + * containing the fields and values with which to update the document + * or the path of the first field to update. + * @param { + * ...(Precondition|*|string|FieldPath)} preconditionOrValues - + * An alternating list of field paths and values to update or a Precondition + * to to enforce on this update. + * @throws {Error} If the provided input is not valid Firestore data. + * @returns {Transaction} This Transaction instance. Used for + * chaining method calls. + * + * @example + * ``` + * firestore.runTransaction(transaction => { + * let documentRef = firestore.doc('col/doc'); + * return transaction.get(documentRef).then(doc => { + * if (doc.exists) { + * transaction.update(documentRef, { count: doc.get('count') + 1 }); + * } else { + * transaction.create(documentRef, { count: 1 }); + * } + * }); + * }); + * ``` + */ + update(documentRef, dataOrField, ...preconditionOrValues) { + if (!this._writeBatch) { + throw new Error(READ_ONLY_WRITE_ERROR_MSG); + } + // eslint-disable-next-line prefer-rest-params + (0, validate_1.validateMinNumberOfArguments)('Transaction.update', arguments, 2); + this._writeBatch.update(documentRef, dataOrField, ...preconditionOrValues); + return this; + } + /** + * Deletes the document referred to by the provided [DocumentReference] + * {@link DocumentReference}. + * + * @param {DocumentReference} documentRef A reference to the document to be + * deleted. + * @param {Precondition=} precondition A precondition to enforce for this + * delete. + * @param {Timestamp=} precondition.lastUpdateTime If set, enforces that the + * document was last updated at lastUpdateTime. Fails the transaction if the + * document doesn't exist or was last updated at a different time. + * @param {boolean=} precondition.exists If set, enforces that the target + * document must or must not exist. + * @returns {Transaction} This Transaction instance. Used for + * chaining method calls. 
+ * + * @example + * ``` + * firestore.runTransaction(transaction => { + * let documentRef = firestore.doc('col/doc'); + * transaction.delete(documentRef); + * return Promise.resolve(); + * }); + * ``` + */ + delete( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + documentRef, precondition) { + if (!this._writeBatch) { + throw new Error(READ_ONLY_WRITE_ERROR_MSG); + } + this._writeBatch.delete(documentRef, precondition); + return this; + } + /** + * Commits all queued-up changes in this transaction and releases all locks. + * + * @private + * @internal + */ + async commit() { + var _a; + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_TRANSACTION_COMMIT, async () => { + if (!this._writeBatch) { + throw new Error(READ_ONLY_WRITE_ERROR_MSG); + } + // If we have not performed any reads in this particular attempt + // then the writes will be atomically committed without a transaction ID + let transactionId; + if (this._transactionIdPromise) { + transactionId = await this._transactionIdPromise; + } + else if (this._writeBatch.isEmpty) { + // If we have not started a transaction (no reads) and we have no writes + // then the commit is a no-op (success) + return; + } + await this._writeBatch._commit({ + transactionId, + requestTag: this._requestTag, + }); + this._transactionIdPromise = undefined; + this._prevTransactionId = transactionId; + }, { + [trace_util_1.ATTRIBUTE_KEY_IS_TRANSACTIONAL]: true, + [trace_util_1.ATTRIBUTE_KEY_DOC_COUNT]: (_a = this._writeBatch) === null || _a === void 0 ? void 0 : _a._opCount, + }); + } + /** + * Releases all locks and rolls back this transaction. The rollback process + * is completed asynchronously and this function resolves before the operation + * is completed. + * + * @private + * @internal + */ + async rollback() { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_TRANSACTION_ROLLBACK, async () => { + // No need to roll back if we have not lazily started the transaction + // or if we are read only + if (!this._transactionIdPromise || !this._writeBatch) { + return; + } + let transactionId; + try { + transactionId = await this._transactionIdPromise; + } + catch (_a) { + // This means the initial read operation rejected + // and we do not have a transaction ID to roll back + this._transactionIdPromise = undefined; + return; + } + const request = { + database: this._firestore.formattedName, + transaction: transactionId, + }; + this._transactionIdPromise = undefined; + this._prevTransactionId = transactionId; + // We don't need to wait for rollback to completed before continuing. + // If there are any locks held, then rollback will eventually release them. + // Rollback can be done concurrently thereby reducing latency caused by + // otherwise blocking. + this._firestore + .request('rollback', request, this._requestTag) + .catch(err => { + (0, logger_1.logger)('Firestore.runTransaction', this._requestTag, 'Best effort to rollback failed with error:', err); + }); + }); + } + /** + * Executes `updateFunction()` and commits the transaction with retry. + * + * @private + * @internal + * @param updateFunction The user function to execute within the transaction + * context. + */ + async runTransaction(updateFunction) { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_TRANSACTION_RUN, async (span) => { + // No backoff is set for readonly transactions (i.e. 
attempts == 1) + if (!this._writeBatch) { + return this.runTransactionOnce(updateFunction); + } + let lastError = undefined; + for (let attempt = 0; attempt < this._maxAttempts; ++attempt) { + span.setAttributes({ + [trace_util_1.ATTRIBUTE_KEY_TRANSACTION_TYPE]: this._writeBatch + ? 'READ_WRITE' + : 'READ_ONLY', + [trace_util_1.ATTRIBUTE_KEY_ATTEMPTS_ALLOWED]: this._maxAttempts, + [trace_util_1.ATTRIBUTE_KEY_ATTEMPTS_REMAINING]: this._maxAttempts - attempt - 1, + }); + try { + if (lastError) { + (0, logger_1.logger)('Firestore.runTransaction', this._requestTag, 'Retrying transaction after error:', lastError); + span.addEvent('Initiate transaction retry'); + } + this._writeBatch._reset(); + await maybeBackoff(this._backoff, lastError); + return await this.runTransactionOnce(updateFunction); + } + catch (err) { + lastError = err; + if (!isRetryableTransactionError(err)) { + break; + } + } + } + (0, logger_1.logger)('Firestore.runTransaction', this._requestTag, 'Transaction not eligible for retry, returning error: %s', lastError); + return Promise.reject(lastError); + }); + } + /** + * Make single attempt to execute `updateFunction()` and commit the + * transaction. Will rollback upon error. + * + * @private + * @internal + * @param updateFunction The user function to execute within the transaction + * context. + */ + async runTransactionOnce(updateFunction) { + try { + const promise = updateFunction(this); + if (!(promise instanceof Promise)) { + throw new Error('You must return a Promise in your transaction()-callback.'); + } + const result = await promise; + if (this._writeBatch) { + await this.commit(); + } + return result; + } + catch (err) { + (0, logger_1.logger)('Firestore.runTransaction', this._requestTag, 'Rolling back transaction after callback error:', err); + await this.rollback(); + return Promise.reject(err); + } + } + /** + * Given a function that performs a read operation, ensures that the first one + * is provided with new transaction options and all subsequent ones are queued + * upon the resulting transaction ID. + */ + withLazyStartedTransaction(param, resultFn) { + if (this._transactionIdPromise) { + // Simply queue this subsequent read operation after the first read + // operation has resolved and we don't expect a transaction ID in the + // response because we are not starting a new transaction + return this._transactionIdPromise + .then(opts => resultFn.call(this, param, opts)) + .then(r => r.result); + } + else { + if (this._readOnlyReadTime) { + // We do not start a transaction for read-only transactions + // do not set _prevTransactionId + return resultFn + .call(this, param, this._readOnlyReadTime) + .then(r => r.result); + } + else { + // This is the first read of the transaction so we create the appropriate + // options for lazily starting the transaction inside this first read op + const opts = {}; + if (this._writeBatch) { + opts.readWrite = this._prevTransactionId + ? 
{ retryTransaction: this._prevTransactionId } + : {}; + } + else { + opts.readOnly = {}; + } + const resultPromise = resultFn.call(this, param, opts); + // Ensure the _transactionIdPromise is set synchronously so that + // subsequent operations will not race to start another transaction + this._transactionIdPromise = resultPromise.then(r => { + if (!r.transaction) { + // Illegal state + // The read operation was provided with new transaction options but did not return a transaction ID + // Rejecting here will cause all queued reads to reject + throw new Error('Transaction ID was missing from server response'); + } + return r.transaction; + }); + return resultPromise.then(r => r.result); + } + } + } + async getSingleFn(document, opts) { + const documentReader = new document_reader_1.DocumentReader(this._firestore, [document], undefined, opts); + const { transaction, result: [result], } = await documentReader._get(this._requestTag); + return { transaction, result }; + } + async getBatchFn({ documents, fieldMask, }, opts) { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_TRANSACTION_GET_DOCUMENTS, async () => { + const documentReader = new document_reader_1.DocumentReader(this._firestore, documents, fieldMask, opts); + return documentReader._get(this._requestTag); + }); + } + async getQueryFn(query, opts) { + return query._get(opts); + } +} +exports.Transaction = Transaction; +/** + * Parses the arguments for the `getAll()` call supported by both the Firestore + * and Transaction class. + * + * @private + * @internal + * @param documentRefsOrReadOptions An array of document references followed by + * an optional ReadOptions object. + */ +function parseGetAllArguments(documentRefsOrReadOptions) { + let documents; + let readOptions = undefined; + if (Array.isArray(documentRefsOrReadOptions[0])) { + throw new Error('getAll() no longer accepts an array as its first argument. ' + + 'Please unpack your array and call getAll() with individual arguments.'); + } + if (documentRefsOrReadOptions.length > 0 && + (0, util_1.isPlainObject)(documentRefsOrReadOptions[documentRefsOrReadOptions.length - 1])) { + readOptions = documentRefsOrReadOptions.pop(); + documents = documentRefsOrReadOptions; + } + else { + documents = documentRefsOrReadOptions; + } + for (let i = 0; i < documents.length; ++i) { + (0, helpers_1.validateDocumentReference)(i, documents[i]); + } + validateReadOptions('options', readOptions, { optional: true }); + const fieldMask = readOptions && readOptions.fieldMask + ? readOptions.fieldMask.map(fieldPath => path_1.FieldPath.fromArgument(fieldPath)) + : undefined; + return { fieldMask, documents }; +} +/** + * Validates the use of 'options' as ReadOptions and enforces that 'fieldMask' + * is an array of strings or field paths. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + * @param options Options that specify whether the ReadOptions can be omitted. 
+ */ +function validateReadOptions(arg, value, options) { + if (!(0, validate_1.validateOptional)(value, options)) { + if (!(0, util_1.isObject)(value)) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'read option')} Input is not an object.'`); + } + const options = value; + if (options.fieldMask !== undefined) { + if (!Array.isArray(options.fieldMask)) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'read option')} "fieldMask" is not an array.`); + } + for (let i = 0; i < options.fieldMask.length; ++i) { + try { + (0, path_1.validateFieldPath)(i, options.fieldMask[i]); + } + catch (err) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'read option')} "fieldMask" is not valid: ${err.message}`); + } + } + } + } +} +function isRetryableTransactionError(error) { + if (error.code !== undefined) { + // This list is based on https://github.com/firebase/firebase-js-sdk/blob/master/packages/firestore/src/core/transaction_runner.ts#L112 + switch (error.code) { + case 10 /* StatusCode.ABORTED */: + case 1 /* StatusCode.CANCELLED */: + case 2 /* StatusCode.UNKNOWN */: + case 4 /* StatusCode.DEADLINE_EXCEEDED */: + case 13 /* StatusCode.INTERNAL */: + case 14 /* StatusCode.UNAVAILABLE */: + case 16 /* StatusCode.UNAUTHENTICATED */: + case 8 /* StatusCode.RESOURCE_EXHAUSTED */: + return true; + case 3 /* StatusCode.INVALID_ARGUMENT */: + // The Firestore backend uses "INVALID_ARGUMENT" for transactions + // IDs that have expired. While INVALID_ARGUMENT is generally not + // retryable, we retry this specific case. + return !!error.message.match(/transaction has expired/); + default: + return false; + } + } + return false; +} +/** + * Delays further operations based on the provided error. + * + * @private + * @internal + * @return A Promise that resolves after the delay expired. + */ +async function maybeBackoff(backoff, error) { + if ((error === null || error === void 0 ? void 0 : error.code) === 8 /* StatusCode.RESOURCE_EXHAUSTED */) { + backoff.resetToMax(); + } + await backoff.backoffAndWait(); +} +//# sourceMappingURL=transaction.js.map + +/***/ }), + +/***/ 75371: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/*! + * Copyright 2018 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultConverter = defaultConverter; +/** + * A default converter to use when none is provided. + * + * By declaring the converter as a variable instead of creating the object + * inside defaultConverter(), object equality when comparing default converters + * is preserved. + * @private + * @internal + */ +const defaultConverterObj = { + toFirestore(modelObject) { + return modelObject; + }, + fromFirestore(snapshot) { + return snapshot.data(); + }, +}; +/** + * A default converter to use when none is provided. 
+ * @private + * @internal + */ +function defaultConverter() { + return defaultConverterObj; +} +//# sourceMappingURL=types.js.map + +/***/ }), + +/***/ 15468: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2018 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Deferred = void 0; +exports.autoId = autoId; +exports.requestTag = requestTag; +exports.isObject = isObject; +exports.isPlainObject = isPlainObject; +exports.isEmpty = isEmpty; +exports.isFunction = isFunction; +exports.isPermanentRpcError = isPermanentRpcError; +exports.getRetryCodes = getRetryCodes; +exports.getTotalTimeout = getTotalTimeout; +exports.getRetryParams = getRetryParams; +exports.silencePromise = silencePromise; +exports.wrapError = wrapError; +exports.tryGetPreferRestEnvironmentVariable = tryGetPreferRestEnvironmentVariable; +exports.mapToArray = mapToArray; +exports.isArrayEqual = isArrayEqual; +exports.isPrimitiveArrayEqual = isPrimitiveArrayEqual; +const crypto_1 = __nccwpck_require__(6113); +const gapicConfig = __nccwpck_require__(89671); +/** + * A Promise implementation that supports deferred resolution. + * @private + * @internal + */ +class Deferred { + constructor() { + this.resolve = () => { }; + this.reject = () => { }; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + } +} +exports.Deferred = Deferred; +/** + * Generate a unique client-side identifier. + * + * Used for the creation of new documents. + * + * @private + * @internal + * @returns {string} A unique 20-character wide identifier. + */ +function autoId() { + const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + let autoId = ''; + while (autoId.length < 20) { + const bytes = (0, crypto_1.randomBytes)(40); + bytes.forEach(b => { + // Length of `chars` is 62. We only take bytes between 0 and 62*4-1 + // (both inclusive). The value is then evenly mapped to indices of `char` + // via a modulo operation. + const maxValue = 62 * 4 - 1; + if (autoId.length < 20 && b <= maxValue) { + autoId += chars.charAt(b % 62); + } + }); + } + return autoId; +} +/** + * Generate a short and semi-random client-side identifier. + * + * Used for the creation of request tags. + * + * @private + * @internal + * @returns {string} A random 5-character wide identifier. + */ +function requestTag() { + return autoId().substr(0, 5); +} +/** + * Determines whether `value` is a JavaScript object. + * + * @private + * @internal + */ +function isObject(value) { + return Object.prototype.toString.call(value) === '[object Object]'; +} +/** + * Verifies that 'obj' is a plain JavaScript object that can be encoded as a + * 'Map' in Firestore. + * + * @private + * @internal + * @param input The argument to verify. + * @returns 'true' if the input can be a treated as a plain object. 
+ */ +function isPlainObject(input) { + return (isObject(input) && + (Object.getPrototypeOf(input) === Object.prototype || + Object.getPrototypeOf(input) === null || + input.constructor.name === 'Object')); +} +/** + * Returns whether `value` has no custom properties. + * + * @private + * @internal + */ +function isEmpty(value) { + return Object.keys(value).length === 0; +} +/** + * Determines whether `value` is a JavaScript function. + * + * @private + * @internal + */ +function isFunction(value) { + return typeof value === 'function'; +} +/** + * Determines whether the provided error is considered permanent for the given + * RPC. + * + * @private + * @internal + */ +function isPermanentRpcError(err, methodName) { + if (err.code !== undefined) { + const retryCodes = getRetryCodes(methodName); + return retryCodes.indexOf(err.code) === -1; + } + else { + return false; + } +} +let serviceConfig; +/** + * Lazy-loads the service config when first accessed. + * @private + * @internal + **/ +function getServiceConfig(methodName) { + if (!serviceConfig) { + serviceConfig = (__nccwpck_require__(90418).constructSettings)('google.firestore.v1.Firestore', gapicConfig, {}, (__nccwpck_require__(53501).Status)); + } + return serviceConfig[methodName]; +} +/** + * Returns the list of retryable error codes specified in the service + * configuration. + * @private + * @internal + */ +function getRetryCodes(methodName) { + var _a, _b, _c; + return (_c = (_b = (_a = getServiceConfig(methodName)) === null || _a === void 0 ? void 0 : _a.retry) === null || _b === void 0 ? void 0 : _b.retryCodes) !== null && _c !== void 0 ? _c : []; +} +/** + * Gets the total timeout in milliseconds from the retry settings in + * the service config for the given RPC. If the total timeout is not + * set, then `0` is returned. + * + * @private + * @internal + */ +function getTotalTimeout(methodName) { + var _a, _b, _c, _d; + return ((_d = (_c = (_b = (_a = getServiceConfig(methodName)) === null || _a === void 0 ? void 0 : _a.retry) === null || _b === void 0 ? void 0 : _b.backoffSettings) === null || _c === void 0 ? void 0 : _c.totalTimeoutMillis) !== null && _d !== void 0 ? _d : 0); +} +/** + * Returns the backoff setting from the service configuration. + * @private + * @internal + */ +function getRetryParams(methodName) { + var _a, _b, _c; + return ((_c = (_b = (_a = getServiceConfig(methodName)) === null || _a === void 0 ? void 0 : _a.retry) === null || _b === void 0 ? void 0 : _b.backoffSettings) !== null && _c !== void 0 ? _c : (__nccwpck_require__(90418).createDefaultBackoffSettings)()); +} +/** + * Returns a promise with a void return type. The returned promise swallows all + * errors and never throws. + * + * This is primarily used to wait for a promise to complete when the result of + * the promise will be discarded. + * + * @private + * @internal + */ +function silencePromise(promise) { + return promise.then(() => { }, () => { }); +} +/** + * Wraps the provided error in a new error that includes the provided stack. + * + * Used to preserve stack traces across async calls. + * @private + * @internal + */ +function wrapError(err, stack) { + err.stack += '\nCaused by: ' + stack; + return err; +} +/** + * Parses the value of the environment variable FIRESTORE_PREFER_REST, and + * returns a value indicating if the environment variable enables or disables + * preferRest. + * + * This function will warn to the console if the environment variable is set + * to an unsupported value. 
+ * + * @return `true` if the environment variable enables `preferRest`, + * `false` if the environment variable disables `preferRest`, or `undefined` + * if the environment variable is not set or is set to an unsupported value. + * + * @internal + * @private + */ +function tryGetPreferRestEnvironmentVariable() { + var _a; + const rawValue = (_a = process.env.FIRESTORE_PREFER_REST) === null || _a === void 0 ? void 0 : _a.trim().toLowerCase(); + if (rawValue === undefined) { + return undefined; + } + else if (rawValue === '1' || rawValue === 'true') { + return true; + } + else if (rawValue === '0' || rawValue === 'false') { + return false; + } + else { + // eslint-disable-next-line no-console + console.warn(`An unsupported value was specified for the environment variable FIRESTORE_PREFER_REST. Value ${rawValue} is unsupported.`); + return undefined; + } +} +/** + * Returns an array of values that are calculated by performing the given `fn` + * on all keys in the given `obj` dictionary. + * + * @private + * @internal + */ +function mapToArray(obj, fn) { + const result = []; + for (const key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + result.push(fn(obj[key], key, obj)); + } + } + return result; +} +/** + * Verifies equality for an array of objects using the `isEqual` interface. + * + * @private + * @internal + * @param left Array of objects supporting `isEqual`. + * @param right Array of objects supporting `isEqual`. + * @return True if arrays are equal. + */ +function isArrayEqual(left, right) { + if (left.length !== right.length) { + return false; + } + for (let i = 0; i < left.length; ++i) { + if (!left[i].isEqual(right[i])) { + return false; + } + } + return true; +} +/** + * Verifies equality for an array of primitives. + * + * @private + * @internal + * @param left Array of primitives. + * @param right Array of primitives. + * @return True if arrays are equal. + */ +function isPrimitiveArrayEqual(left, right) { + if (left.length !== right.length) { + return false; + } + for (let i = 0; i < left.length; ++i) { + if (left[i] !== right[i]) { + return false; + } + } + return true; +} +//# sourceMappingURL=util.js.map + +/***/ }), + +/***/ 34184: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FirestoreAdminClient = void 0; +const jsonProtos = __nccwpck_require__(589); +/** + * Client JSON configuration object, loaded from + * `src/v1/firestore_admin_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. 
+ */ +const gapicConfig = __nccwpck_require__(95189); +const version = (__nccwpck_require__(49830)/* .version */ .i8); +/** + * The Cloud Firestore Admin API. + * + * This API provides several administrative services for Cloud Firestore. + * + * Project, Database, Namespace, Collection, Collection Group, and Document are + * used as defined in the Google Cloud Firestore API. + * + * Operation: An Operation represents work being performed in the background. + * + * The index service manages Cloud Firestore indexes. + * + * Index creation is performed asynchronously. + * An Operation resource is created for each such asynchronous operation. + * The state of the operation (including any errors encountered) + * may be queried via the Operation resource. + * + * The Operations collection provides a record of actions performed for the + * specified Project (including any Operations in progress). Operations are not + * created directly but through calls on other collections or resources. + * + * An Operation that is done may be deleted so that it is no longer listed as + * part of the Operation collection. Operations are garbage collected after + * 30 days. By default, ListOperations will only return in progress and failed + * operations. To list completed operation, issue a ListOperations request with + * the filter `done: true`. + * + * Operations are created by service `FirestoreAdmin`, but are accessed via + * service `google.longrunning.Operations`. + * @class + * @memberof v1 + */ +class FirestoreAdminClient { + /** + * Construct an instance of FirestoreAdminClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. 
Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new FirestoreAdminClient({fallback: true}, gax); + * ``` + */ + constructor(opts, gaxInstance) { + var _a, _b, _c, _d, _e; + this._terminated = false; + this.descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + // Ensure that options include all the required fields. + const staticMembers = this.constructor; + if ((opts === null || opts === void 0 ? void 0 : opts.universe_domain) && + (opts === null || opts === void 0 ? void 0 : opts.universeDomain) && + (opts === null || opts === void 0 ? void 0 : opts.universe_domain) !== (opts === null || opts === void 0 ? void 0 : opts.universeDomain)) { + throw new Error('Please set either universe_domain or universeDomain, but not both.'); + } + const universeDomainEnvVar = typeof process === 'object' && typeof process.env === 'object' + ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] + : undefined; + this._universeDomain = + (_c = (_b = (_a = opts === null || opts === void 0 ? void 0 : opts.universeDomain) !== null && _a !== void 0 ? _a : opts === null || opts === void 0 ? void 0 : opts.universe_domain) !== null && _b !== void 0 ? _b : universeDomainEnvVar) !== null && _c !== void 0 ? _c : 'googleapis.com'; + this._servicePath = 'firestore.' + this._universeDomain; + const servicePath = (opts === null || opts === void 0 ? void 0 : opts.servicePath) || (opts === null || opts === void 0 ? void 0 : opts.apiEndpoint) || this._servicePath; + this._providedCustomServicePath = !!((opts === null || opts === void 0 ? void 0 : opts.servicePath) || (opts === null || opts === void 0 ? void 0 : opts.apiEndpoint)); + const port = (opts === null || opts === void 0 ? void 0 : opts.port) || staticMembers.port; + const clientConfig = (_d = opts === null || opts === void 0 ? void 0 : opts.clientConfig) !== null && _d !== void 0 ? _d : {}; + const fallback = (_e = opts === null || opts === void 0 ? void 0 : opts.fallback) !== null && _e !== void 0 ? _e : (typeof window !== 'undefined' && typeof (window === null || window === void 0 ? void 0 : window.fetch) === 'function'); + opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts); + // Request numeric enum values if REST transport is used. + opts.numericEnums = true; + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== this._servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = __nccwpck_require__(12263); + } + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + // Save options to use in initialize() method. + this._opts = opts; + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth; + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + // Set defaultServicePath on the auth object. 
+ this.auth.defaultServicePath = this._servicePath; + // Set the default scopes in auth client if needed. + if (servicePath === this._servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + this.locationsClient = new this._gaxModule.LocationsClient(this._gaxGrpc, opts); + // Determine the client header string. + const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process === 'object' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } + else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } + else { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + backupPathTemplate: new this._gaxModule.PathTemplate('projects/{project}/locations/{location}/backups/{backup}'), + backupSchedulePathTemplate: new this._gaxModule.PathTemplate('projects/{project}/databases/{database}/backupSchedules/{backup_schedule}'), + collectionGroupPathTemplate: new this._gaxModule.PathTemplate('projects/{project}/databases/{database}/collectionGroups/{collection}'), + databasePathTemplate: new this._gaxModule.PathTemplate('projects/{project}/databases/{database}'), + fieldPathTemplate: new this._gaxModule.PathTemplate('projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}'), + indexPathTemplate: new this._gaxModule.PathTemplate('projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}'), + locationPathTemplate: new this._gaxModule.PathTemplate('projects/{project}/locations/{location}'), + projectPathTemplate: new this._gaxModule.PathTemplate('projects/{project}'), + }; + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listIndexes: new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'indexes'), + listFields: new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'fields'), + }; + const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); + // This API contains "long-running operations", which return a + // an Operation object that allows for tracking of the operation, + // rather than holding a request open. + const lroOptions = { + auth: this.auth, + grpc: 'grpc' in this._gaxGrpc ? 
this._gaxGrpc.grpc : undefined, + }; + if (opts.fallback) { + lroOptions.protoJson = protoFilesRoot; + lroOptions.httpRules = [ + { + selector: 'google.longrunning.Operations.CancelOperation', + post: '/v1/{name=projects/*/databases/*/operations/*}:cancel', + body: '*', + }, + { + selector: 'google.longrunning.Operations.DeleteOperation', + delete: '/v1/{name=projects/*/databases/*/operations/*}', + }, + { + selector: 'google.longrunning.Operations.GetOperation', + get: '/v1/{name=projects/*/databases/*/operations/*}', + }, + { + selector: 'google.longrunning.Operations.ListOperations', + get: '/v1/{name=projects/*/databases/*}/operations', + }, + ]; + } + this.operationsClient = this._gaxModule + .lro(lroOptions) + .operationsClient(opts); + const createIndexResponse = protoFilesRoot.lookup('.google.firestore.admin.v1.Index'); + const createIndexMetadata = protoFilesRoot.lookup('.google.firestore.admin.v1.IndexOperationMetadata'); + const updateFieldResponse = protoFilesRoot.lookup('.google.firestore.admin.v1.Field'); + const updateFieldMetadata = protoFilesRoot.lookup('.google.firestore.admin.v1.FieldOperationMetadata'); + const exportDocumentsResponse = protoFilesRoot.lookup('.google.firestore.admin.v1.ExportDocumentsResponse'); + const exportDocumentsMetadata = protoFilesRoot.lookup('.google.firestore.admin.v1.ExportDocumentsMetadata'); + const importDocumentsResponse = protoFilesRoot.lookup('.google.protobuf.Empty'); + const importDocumentsMetadata = protoFilesRoot.lookup('.google.firestore.admin.v1.ImportDocumentsMetadata'); + const bulkDeleteDocumentsResponse = protoFilesRoot.lookup('.google.firestore.admin.v1.BulkDeleteDocumentsResponse'); + const bulkDeleteDocumentsMetadata = protoFilesRoot.lookup('.google.firestore.admin.v1.BulkDeleteDocumentsMetadata'); + const createDatabaseResponse = protoFilesRoot.lookup('.google.firestore.admin.v1.Database'); + const createDatabaseMetadata = protoFilesRoot.lookup('.google.firestore.admin.v1.CreateDatabaseMetadata'); + const updateDatabaseResponse = protoFilesRoot.lookup('.google.firestore.admin.v1.Database'); + const updateDatabaseMetadata = protoFilesRoot.lookup('.google.firestore.admin.v1.UpdateDatabaseMetadata'); + const deleteDatabaseResponse = protoFilesRoot.lookup('.google.firestore.admin.v1.Database'); + const deleteDatabaseMetadata = protoFilesRoot.lookup('.google.firestore.admin.v1.DeleteDatabaseMetadata'); + const restoreDatabaseResponse = protoFilesRoot.lookup('.google.firestore.admin.v1.Database'); + const restoreDatabaseMetadata = protoFilesRoot.lookup('.google.firestore.admin.v1.RestoreDatabaseMetadata'); + this.descriptors.longrunning = { + createIndex: new this._gaxModule.LongrunningDescriptor(this.operationsClient, createIndexResponse.decode.bind(createIndexResponse), createIndexMetadata.decode.bind(createIndexMetadata)), + updateField: new this._gaxModule.LongrunningDescriptor(this.operationsClient, updateFieldResponse.decode.bind(updateFieldResponse), updateFieldMetadata.decode.bind(updateFieldMetadata)), + exportDocuments: new this._gaxModule.LongrunningDescriptor(this.operationsClient, exportDocumentsResponse.decode.bind(exportDocumentsResponse), exportDocumentsMetadata.decode.bind(exportDocumentsMetadata)), + importDocuments: new this._gaxModule.LongrunningDescriptor(this.operationsClient, importDocumentsResponse.decode.bind(importDocumentsResponse), importDocumentsMetadata.decode.bind(importDocumentsMetadata)), + bulkDeleteDocuments: new this._gaxModule.LongrunningDescriptor(this.operationsClient, 
bulkDeleteDocumentsResponse.decode.bind(bulkDeleteDocumentsResponse), bulkDeleteDocumentsMetadata.decode.bind(bulkDeleteDocumentsMetadata)), + createDatabase: new this._gaxModule.LongrunningDescriptor(this.operationsClient, createDatabaseResponse.decode.bind(createDatabaseResponse), createDatabaseMetadata.decode.bind(createDatabaseMetadata)), + updateDatabase: new this._gaxModule.LongrunningDescriptor(this.operationsClient, updateDatabaseResponse.decode.bind(updateDatabaseResponse), updateDatabaseMetadata.decode.bind(updateDatabaseMetadata)), + deleteDatabase: new this._gaxModule.LongrunningDescriptor(this.operationsClient, deleteDatabaseResponse.decode.bind(deleteDatabaseResponse), deleteDatabaseMetadata.decode.bind(deleteDatabaseMetadata)), + restoreDatabase: new this._gaxModule.LongrunningDescriptor(this.operationsClient, restoreDatabaseResponse.decode.bind(restoreDatabaseResponse), restoreDatabaseMetadata.decode.bind(restoreDatabaseMetadata)), + }; + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings('google.firestore.admin.v1.FirestoreAdmin', gapicConfig, opts.clientConfig || {}, { 'x-goog-api-client': clientHeader.join(' ') }); + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.firestoreAdminStub) { + return this.firestoreAdminStub; + } + // Put together the "service stub" for + // google.firestore.admin.v1.FirestoreAdmin. + this.firestoreAdminStub = this._gaxGrpc.createStub(this._opts.fallback + ? this._protos.lookupService('google.firestore.admin.v1.FirestoreAdmin') + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + this._protos.google.firestore.admin.v1.FirestoreAdmin, this._opts, this._providedCustomServicePath); + // Iterate over each of the methods that the service provides + // and create an API call method for each. 
+ const firestoreAdminStubMethods = [ + 'createIndex', + 'listIndexes', + 'getIndex', + 'deleteIndex', + 'getField', + 'updateField', + 'listFields', + 'exportDocuments', + 'importDocuments', + 'bulkDeleteDocuments', + 'createDatabase', + 'getDatabase', + 'listDatabases', + 'updateDatabase', + 'deleteDatabase', + 'getBackup', + 'listBackups', + 'deleteBackup', + 'restoreDatabase', + 'createBackupSchedule', + 'getBackupSchedule', + 'listBackupSchedules', + 'updateBackupSchedule', + 'deleteBackupSchedule', + ]; + for (const methodName of firestoreAdminStubMethods) { + const callPromise = this.firestoreAdminStub.then(stub => (...args) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, (err) => () => { + throw err; + }); + const descriptor = this.descriptors.page[methodName] || + this.descriptors.longrunning[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall(callPromise, this._defaults[methodName], descriptor, this._opts.fallback); + this.innerApiCalls[methodName] = apiCall; + } + return this.firestoreAdminStub; + } + /** + * The DNS address for this API service. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + if (typeof process === 'object' && + typeof process.emitWarning === 'function') { + process.emitWarning('Static servicePath is deprecated, please use the instance method instead.', 'DeprecationWarning'); + } + return 'firestore.googleapis.com'; + } + /** + * The DNS address for this API service - same as servicePath. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + if (typeof process === 'object' && + typeof process.emitWarning === 'function') { + process.emitWarning('Static apiEndpoint is deprecated, please use the instance method instead.', 'DeprecationWarning'); + } + return 'firestore.googleapis.com'; + } + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + get apiEndpoint() { + return this._servicePath; + } + get universeDomain() { + return this._universeDomain; + } + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', + ]; + } + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. 
+ */ + getProjectId(callback) { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + getIndex(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.getIndex(request, options, callback); + } + deleteIndex(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.deleteIndex(request, options, callback); + } + getField(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.getField(request, options, callback); + } + getDatabase(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.getDatabase(request, options, callback); + } + listDatabases(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? 
_a : '', + }); + this.initialize(); + return this.innerApiCalls.listDatabases(request, options, callback); + } + getBackup(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.getBackup(request, options, callback); + } + listBackups(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.listBackups(request, options, callback); + } + deleteBackup(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.deleteBackup(request, options, callback); + } + createBackupSchedule(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.createBackupSchedule(request, options, callback); + } + getBackupSchedule(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? 
_a : '', + }); + this.initialize(); + return this.innerApiCalls.getBackupSchedule(request, options, callback); + } + listBackupSchedules(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.listBackupSchedules(request, options, callback); + } + updateBackupSchedule(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'backup_schedule.name': (_a = request.backupSchedule.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.updateBackupSchedule(request, options, callback); + } + deleteBackupSchedule(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.deleteBackupSchedule(request, options, callback); + } + createIndex(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.createIndex(request, options, callback); + } + /** + * Check the status of the long running operation returned by `createIndex()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1/firestore_admin.create_index.js + * region_tag:firestore_v1_generated_FirestoreAdmin_CreateIndex_async + */ + async checkCreateIndexProgress(name) { + const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest({ name }); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new this._gaxModule.Operation(operation, this.descriptors.longrunning.createIndex, this._gaxModule.createDefaultBackoffSettings()); + return decodeOperation; + } + updateField(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'field.name': (_a = request.field.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.updateField(request, options, callback); + } + /** + * Check the status of the long running operation returned by `updateField()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore_admin.update_field.js + * region_tag:firestore_v1_generated_FirestoreAdmin_UpdateField_async + */ + async checkUpdateFieldProgress(name) { + const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest({ name }); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new this._gaxModule.Operation(operation, this.descriptors.longrunning.updateField, this._gaxModule.createDefaultBackoffSettings()); + return decodeOperation; + } + exportDocuments(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.exportDocuments(request, options, callback); + } + /** + * Check the status of the long running operation returned by `exportDocuments()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1/firestore_admin.export_documents.js + * region_tag:firestore_v1_generated_FirestoreAdmin_ExportDocuments_async + */ + async checkExportDocumentsProgress(name) { + const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest({ name }); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new this._gaxModule.Operation(operation, this.descriptors.longrunning.exportDocuments, this._gaxModule.createDefaultBackoffSettings()); + return decodeOperation; + } + importDocuments(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.importDocuments(request, options, callback); + } + /** + * Check the status of the long running operation returned by `importDocuments()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore_admin.import_documents.js + * region_tag:firestore_v1_generated_FirestoreAdmin_ImportDocuments_async + */ + async checkImportDocumentsProgress(name) { + const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest({ name }); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new this._gaxModule.Operation(operation, this.descriptors.longrunning.importDocuments, this._gaxModule.createDefaultBackoffSettings()); + return decodeOperation; + } + bulkDeleteDocuments(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.bulkDeleteDocuments(request, options, callback); + } + /** + * Check the status of the long running operation returned by `bulkDeleteDocuments()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1/firestore_admin.bulk_delete_documents.js + * region_tag:firestore_v1_generated_FirestoreAdmin_BulkDeleteDocuments_async + */ + async checkBulkDeleteDocumentsProgress(name) { + const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest({ name }); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new this._gaxModule.Operation(operation, this.descriptors.longrunning.bulkDeleteDocuments, this._gaxModule.createDefaultBackoffSettings()); + return decodeOperation; + } + createDatabase(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.createDatabase(request, options, callback); + } + /** + * Check the status of the long running operation returned by `createDatabase()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore_admin.create_database.js + * region_tag:firestore_v1_generated_FirestoreAdmin_CreateDatabase_async + */ + async checkCreateDatabaseProgress(name) { + const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest({ name }); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new this._gaxModule.Operation(operation, this.descriptors.longrunning.createDatabase, this._gaxModule.createDefaultBackoffSettings()); + return decodeOperation; + } + updateDatabase(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'database.name': (_a = request.database.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.updateDatabase(request, options, callback); + } + /** + * Check the status of the long running operation returned by `updateDatabase()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1/firestore_admin.update_database.js + * region_tag:firestore_v1_generated_FirestoreAdmin_UpdateDatabase_async + */ + async checkUpdateDatabaseProgress(name) { + const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest({ name }); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new this._gaxModule.Operation(operation, this.descriptors.longrunning.updateDatabase, this._gaxModule.createDefaultBackoffSettings()); + return decodeOperation; + } + deleteDatabase(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.deleteDatabase(request, options, callback); + } + /** + * Check the status of the long running operation returned by `deleteDatabase()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore_admin.delete_database.js + * region_tag:firestore_v1_generated_FirestoreAdmin_DeleteDatabase_async + */ + async checkDeleteDatabaseProgress(name) { + const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest({ name }); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new this._gaxModule.Operation(operation, this.descriptors.longrunning.deleteDatabase, this._gaxModule.createDefaultBackoffSettings()); + return decodeOperation; + } + restoreDatabase(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.restoreDatabase(request, options, callback); + } + /** + * Check the status of the long running operation returned by `restoreDatabase()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1/firestore_admin.restore_database.js + * region_tag:firestore_v1_generated_FirestoreAdmin_RestoreDatabase_async + */ + async checkRestoreDatabaseProgress(name) { + const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest({ name }); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new this._gaxModule.Operation(operation, this.descriptors.longrunning.restoreDatabase, this._gaxModule.createDefaultBackoffSettings()); + return decodeOperation; + } + listIndexes(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.listIndexes(request, options, callback); + } + /** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. A parent name of the form + * `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + * @param {string} request.filter + * The filter to apply to list results. + * @param {number} request.pageSize + * The number of results to return. + * @param {string} request.pageToken + * A page token, returned from a previous call to + * {@link protos.google.firestore.admin.v1.FirestoreAdmin.ListIndexes|FirestoreAdmin.ListIndexes}, + * that may be used to get the next page of results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing {@link protos.google.firestore.admin.v1.Index|Index} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listIndexesAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + */ + listIndexesStream(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['listIndexes']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listIndexes.createStream(this.innerApiCalls.listIndexes, request, callSettings); + } + /** + * Equivalent to `listIndexes`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. 
+ * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. A parent name of the form + * `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + * @param {string} request.filter + * The filter to apply to list results. + * @param {number} request.pageSize + * The number of results to return. + * @param {string} request.pageToken + * A page token, returned from a previous call to + * {@link protos.google.firestore.admin.v1.FirestoreAdmin.ListIndexes|FirestoreAdmin.ListIndexes}, + * that may be used to get the next page of results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. + * When you iterate the returned iterable, each element will be an object representing + * {@link protos.google.firestore.admin.v1.Index|Index}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore_admin.list_indexes.js + * region_tag:firestore_v1_generated_FirestoreAdmin_ListIndexes_async + */ + listIndexesAsync(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['listIndexes']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listIndexes.asyncIterate(this.innerApiCalls['listIndexes'], request, callSettings); + } + listFields(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.listFields(request, options, callback); + } + /** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. A parent name of the form + * `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + * @param {string} request.filter + * The filter to apply to list results. Currently, + * {@link protos.google.firestore.admin.v1.FirestoreAdmin.ListFields|FirestoreAdmin.ListFields} + * only supports listing fields that have been explicitly overridden. 
To issue + * this query, call + * {@link protos.google.firestore.admin.v1.FirestoreAdmin.ListFields|FirestoreAdmin.ListFields} + * with a filter that includes `indexConfig.usesAncestorConfig:false` or + * `ttlConfig:*`. + * @param {number} request.pageSize + * The number of results to return. + * @param {string} request.pageToken + * A page token, returned from a previous call to + * {@link protos.google.firestore.admin.v1.FirestoreAdmin.ListFields|FirestoreAdmin.ListFields}, + * that may be used to get the next page of results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing {@link protos.google.firestore.admin.v1.Field|Field} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listFieldsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + */ + listFieldsStream(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['listFields']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listFields.createStream(this.innerApiCalls.listFields, request, callSettings); + } + /** + * Equivalent to `listFields`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. A parent name of the form + * `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + * @param {string} request.filter + * The filter to apply to list results. Currently, + * {@link protos.google.firestore.admin.v1.FirestoreAdmin.ListFields|FirestoreAdmin.ListFields} + * only supports listing fields that have been explicitly overridden. To issue + * this query, call + * {@link protos.google.firestore.admin.v1.FirestoreAdmin.ListFields|FirestoreAdmin.ListFields} + * with a filter that includes `indexConfig.usesAncestorConfig:false` or + * `ttlConfig:*`. + * @param {number} request.pageSize + * The number of results to return. + * @param {string} request.pageToken + * A page token, returned from a previous call to + * {@link protos.google.firestore.admin.v1.FirestoreAdmin.ListFields|FirestoreAdmin.ListFields}, + * that may be used to get the next page of results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. 
+ * When you iterate the returned iterable, each element will be an object representing + * {@link protos.google.firestore.admin.v1.Field|Field}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore_admin.list_fields.js + * region_tag:firestore_v1_generated_FirestoreAdmin_ListFields_async + */ + listFieldsAsync(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['listFields']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listFields.asyncIterate(this.innerApiCalls['listFields'], request, callSettings); + } + /** + * Gets information about a location. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Resource name for the location. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html | CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing {@link google.cloud.location.Location | Location}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. + * @example + * ``` + * const [response] = await client.getLocation(request); + * ``` + */ + getLocation(request, options, callback) { + return this.locationsClient.getLocation(request, options, callback); + } + /** + * Lists information about the supported locations for this service. Returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * The resource that owns the locations collection, if applicable. + * @param {string} request.filter + * The standard list filter. + * @param {number} request.pageSize + * The standard list page size. + * @param {string} request.pageToken + * The standard list page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. + * When you iterate the returned iterable, each element will be an object representing + * {@link google.cloud.location.Location | Location}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. 
+ * @example + * ``` + * const iterable = client.listLocationsAsync(request); + * for await (const response of iterable) { + * // process response + * } + * ``` + */ + listLocationsAsync(request, options) { + return this.locationsClient.listLocationsAsync(request, options); + } + /** + * Gets the latest state of a long-running operation. Clients can use this + * method to poll the operation result at intervals as recommended by the API + * service. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation resource. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See {@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions | gax.CallOptions} + * for the details. + * @param {function(?Error, ?Object)=} callback + * The function which will be called with the result of the API call. + * + * The second parameter to the callback is an object representing + * {@link google.longrunning.Operation | google.longrunning.Operation}. + * @return {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * {@link google.longrunning.Operation | google.longrunning.Operation}. + * The promise has a method named "cancel" which cancels the ongoing API call. + * + * @example + * ``` + * const client = longrunning.operationsClient(); + * const name = ''; + * const [response] = await client.getOperation({name}); + * // doThingsWith(response) + * ``` + */ + getOperation(request, options, callback) { + return this.operationsClient.getOperation(request, options, callback); + } + /** + * Lists operations that match the specified filter in the request. If the + * server doesn't support this method, it returns `UNIMPLEMENTED`. Returns an iterable object. + * + * For-await-of syntax is used with the iterable to recursively get response element on-demand. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation collection. + * @param {string} request.filter - The standard list filter. + * @param {number=} request.pageSize - + * The maximum number of resources contained in the underlying API + * response. If page streaming is performed per-resource, this + * parameter does not affect the return value. If page streaming is + * performed per-page, this determines the maximum number of + * resources in a page. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See {@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions | gax.CallOptions} for the + * details. + * @returns {Object} + * An iterable Object that conforms to {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | iteration protocols}. + * + * @example + * ``` + * const client = longrunning.operationsClient(); + * for await (const response of client.listOperationsAsync(request)); + * // doThingsWith(response) + * ``` + */ + listOperationsAsync(request, options) { + return this.operationsClient.listOperationsAsync(request, options); + } + /** + * Starts asynchronous cancellation on a long-running operation. The server + * makes a best effort to cancel the operation, but success is not + * guaranteed. 
If the server doesn't support this method, it returns + * `google.rpc.Code.UNIMPLEMENTED`. Clients can use + * {@link Operations.GetOperation} or + * other methods to check whether the cancellation succeeded or whether the + * operation completed despite cancellation. On successful cancellation, + * the operation is not deleted; instead, it becomes an operation with + * an {@link Operation.error} value with a {@link google.rpc.Status.code} of + * 1, corresponding to `Code.CANCELLED`. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation resource to be cancelled. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See {@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions | gax.CallOptions} for the + * details. + * @param {function(?Error)=} callback + * The function which will be called with the result of the API call. + * @return {Promise} - The promise which resolves when API call finishes. + * The promise has a method named "cancel" which cancels the ongoing API + * call. + * + * @example + * ``` + * const client = longrunning.operationsClient(); + * await client.cancelOperation({name: ''}); + * ``` + */ + cancelOperation(request, options, callback) { + return this.operationsClient.cancelOperation(request, options, callback); + } + /** + * Deletes a long-running operation. This method indicates that the client is + * no longer interested in the operation result. It does not cancel the + * operation. If the server doesn't support this method, it returns + * `google.rpc.Code.UNIMPLEMENTED`. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation resource to be deleted. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See {@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions | gax.CallOptions} + * for the details. + * @param {function(?Error)=} callback + * The function which will be called with the result of the API call. + * @return {Promise} - The promise which resolves when API call finishes. + * The promise has a method named "cancel" which cancels the ongoing API + * call. + * + * @example + * ``` + * const client = longrunning.operationsClient(); + * await client.deleteOperation({name: ''}); + * ``` + */ + deleteOperation(request, options, callback) { + return this.operationsClient.deleteOperation(request, options, callback); + } + // -------------------- + // -- Path templates -- + // -------------------- + /** + * Return a fully-qualified backup resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} backup + * @returns {string} Resource name string. + */ + backupPath(project, location, backup) { + return this.pathTemplates.backupPathTemplate.render({ + project: project, + location: location, + backup: backup, + }); + } + /** + * Parse the project from Backup resource. + * + * @param {string} backupName + * A fully-qualified path representing Backup resource. + * @returns {string} A string representing the project. + */ + matchProjectFromBackupName(backupName) { + return this.pathTemplates.backupPathTemplate.match(backupName).project; + } + /** + * Parse the location from Backup resource. 
+ * + * @param {string} backupName + * A fully-qualified path representing Backup resource. + * @returns {string} A string representing the location. + */ + matchLocationFromBackupName(backupName) { + return this.pathTemplates.backupPathTemplate.match(backupName).location; + } + /** + * Parse the backup from Backup resource. + * + * @param {string} backupName + * A fully-qualified path representing Backup resource. + * @returns {string} A string representing the backup. + */ + matchBackupFromBackupName(backupName) { + return this.pathTemplates.backupPathTemplate.match(backupName).backup; + } + /** + * Return a fully-qualified backupSchedule resource name string. + * + * @param {string} project + * @param {string} database + * @param {string} backup_schedule + * @returns {string} Resource name string. + */ + backupSchedulePath(project, database, backupSchedule) { + return this.pathTemplates.backupSchedulePathTemplate.render({ + project: project, + database: database, + backup_schedule: backupSchedule, + }); + } + /** + * Parse the project from BackupSchedule resource. + * + * @param {string} backupScheduleName + * A fully-qualified path representing BackupSchedule resource. + * @returns {string} A string representing the project. + */ + matchProjectFromBackupScheduleName(backupScheduleName) { + return this.pathTemplates.backupSchedulePathTemplate.match(backupScheduleName).project; + } + /** + * Parse the database from BackupSchedule resource. + * + * @param {string} backupScheduleName + * A fully-qualified path representing BackupSchedule resource. + * @returns {string} A string representing the database. + */ + matchDatabaseFromBackupScheduleName(backupScheduleName) { + return this.pathTemplates.backupSchedulePathTemplate.match(backupScheduleName).database; + } + /** + * Parse the backup_schedule from BackupSchedule resource. + * + * @param {string} backupScheduleName + * A fully-qualified path representing BackupSchedule resource. + * @returns {string} A string representing the backup_schedule. + */ + matchBackupScheduleFromBackupScheduleName(backupScheduleName) { + return this.pathTemplates.backupSchedulePathTemplate.match(backupScheduleName).backup_schedule; + } + /** + * Return a fully-qualified collectionGroup resource name string. + * + * @param {string} project + * @param {string} database + * @param {string} collection + * @returns {string} Resource name string. + */ + collectionGroupPath(project, database, collection) { + return this.pathTemplates.collectionGroupPathTemplate.render({ + project: project, + database: database, + collection: collection, + }); + } + /** + * Parse the project from CollectionGroup resource. + * + * @param {string} collectionGroupName + * A fully-qualified path representing CollectionGroup resource. + * @returns {string} A string representing the project. + */ + matchProjectFromCollectionGroupName(collectionGroupName) { + return this.pathTemplates.collectionGroupPathTemplate.match(collectionGroupName).project; + } + /** + * Parse the database from CollectionGroup resource. + * + * @param {string} collectionGroupName + * A fully-qualified path representing CollectionGroup resource. + * @returns {string} A string representing the database. + */ + matchDatabaseFromCollectionGroupName(collectionGroupName) { + return this.pathTemplates.collectionGroupPathTemplate.match(collectionGroupName).database; + } + /** + * Parse the collection from CollectionGroup resource. 
+ * + * @param {string} collectionGroupName + * A fully-qualified path representing CollectionGroup resource. + * @returns {string} A string representing the collection. + */ + matchCollectionFromCollectionGroupName(collectionGroupName) { + return this.pathTemplates.collectionGroupPathTemplate.match(collectionGroupName).collection; + } + /** + * Return a fully-qualified database resource name string. + * + * @param {string} project + * @param {string} database + * @returns {string} Resource name string. + */ + databasePath(project, database) { + return this.pathTemplates.databasePathTemplate.render({ + project: project, + database: database, + }); + } + /** + * Parse the project from Database resource. + * + * @param {string} databaseName + * A fully-qualified path representing Database resource. + * @returns {string} A string representing the project. + */ + matchProjectFromDatabaseName(databaseName) { + return this.pathTemplates.databasePathTemplate.match(databaseName).project; + } + /** + * Parse the database from Database resource. + * + * @param {string} databaseName + * A fully-qualified path representing Database resource. + * @returns {string} A string representing the database. + */ + matchDatabaseFromDatabaseName(databaseName) { + return this.pathTemplates.databasePathTemplate.match(databaseName).database; + } + /** + * Return a fully-qualified field resource name string. + * + * @param {string} project + * @param {string} database + * @param {string} collection + * @param {string} field + * @returns {string} Resource name string. + */ + fieldPath(project, database, collection, field) { + return this.pathTemplates.fieldPathTemplate.render({ + project: project, + database: database, + collection: collection, + field: field, + }); + } + /** + * Parse the project from Field resource. + * + * @param {string} fieldName + * A fully-qualified path representing Field resource. + * @returns {string} A string representing the project. + */ + matchProjectFromFieldName(fieldName) { + return this.pathTemplates.fieldPathTemplate.match(fieldName).project; + } + /** + * Parse the database from Field resource. + * + * @param {string} fieldName + * A fully-qualified path representing Field resource. + * @returns {string} A string representing the database. + */ + matchDatabaseFromFieldName(fieldName) { + return this.pathTemplates.fieldPathTemplate.match(fieldName).database; + } + /** + * Parse the collection from Field resource. + * + * @param {string} fieldName + * A fully-qualified path representing Field resource. + * @returns {string} A string representing the collection. + */ + matchCollectionFromFieldName(fieldName) { + return this.pathTemplates.fieldPathTemplate.match(fieldName).collection; + } + /** + * Parse the field from Field resource. + * + * @param {string} fieldName + * A fully-qualified path representing Field resource. + * @returns {string} A string representing the field. + */ + matchFieldFromFieldName(fieldName) { + return this.pathTemplates.fieldPathTemplate.match(fieldName).field; + } + /** + * Return a fully-qualified index resource name string. + * + * @param {string} project + * @param {string} database + * @param {string} collection + * @param {string} index + * @returns {string} Resource name string. + */ + indexPath(project, database, collection, index) { + return this.pathTemplates.indexPathTemplate.render({ + project: project, + database: database, + collection: collection, + index: index, + }); + } + /** + * Parse the project from Index resource. 
+ * + * @param {string} indexName + * A fully-qualified path representing Index resource. + * @returns {string} A string representing the project. + */ + matchProjectFromIndexName(indexName) { + return this.pathTemplates.indexPathTemplate.match(indexName).project; + } + /** + * Parse the database from Index resource. + * + * @param {string} indexName + * A fully-qualified path representing Index resource. + * @returns {string} A string representing the database. + */ + matchDatabaseFromIndexName(indexName) { + return this.pathTemplates.indexPathTemplate.match(indexName).database; + } + /** + * Parse the collection from Index resource. + * + * @param {string} indexName + * A fully-qualified path representing Index resource. + * @returns {string} A string representing the collection. + */ + matchCollectionFromIndexName(indexName) { + return this.pathTemplates.indexPathTemplate.match(indexName).collection; + } + /** + * Parse the index from Index resource. + * + * @param {string} indexName + * A fully-qualified path representing Index resource. + * @returns {string} A string representing the index. + */ + matchIndexFromIndexName(indexName) { + return this.pathTemplates.indexPathTemplate.match(indexName).index; + } + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project, location) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectName(projectName) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
+ */ + close() { + if (this.firestoreAdminStub && !this._terminated) { + return this.firestoreAdminStub.then(stub => { + this._terminated = true; + stub.close(); + this.locationsClient.close(); + this.operationsClient.close(); + }); + } + return Promise.resolve(); + } +} +exports.FirestoreAdminClient = FirestoreAdminClient; +//# sourceMappingURL=firestore_admin_client.js.map + +/***/ }), + +/***/ 77454: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FirestoreClient = void 0; +const stream_1 = __nccwpck_require__(12781); +const jsonProtos = __nccwpck_require__(85143); +/** + * Client JSON configuration object, loaded from + * `src/v1/firestore_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +const gapicConfig = __nccwpck_require__(89671); +const version = (__nccwpck_require__(49830)/* .version */ .i8); +/** + * The Cloud Firestore service. + * + * Cloud Firestore is a fast, fully managed, serverless, cloud-native NoSQL + * document database that simplifies storing, syncing, and querying data for + * your mobile, web, and IoT apps at global scale. Its client libraries provide + * live synchronization and offline support, while its security features and + * integrations with Firebase and Google Cloud Platform accelerate building + * truly serverless apps. + * @class + * @memberof v1 + */ +class FirestoreClient { + /** + * Construct an instance of FirestoreClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. 
If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new FirestoreClient({fallback: true}, gax); + * ``` + */ + constructor(opts, gaxInstance) { + var _a, _b, _c, _d, _e; + this._terminated = false; + this.descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + // Ensure that options include all the required fields. + const staticMembers = this.constructor; + if ((opts === null || opts === void 0 ? void 0 : opts.universe_domain) && + (opts === null || opts === void 0 ? void 0 : opts.universeDomain) && + (opts === null || opts === void 0 ? void 0 : opts.universe_domain) !== (opts === null || opts === void 0 ? void 0 : opts.universeDomain)) { + throw new Error('Please set either universe_domain or universeDomain, but not both.'); + } + const universeDomainEnvVar = typeof process === 'object' && typeof process.env === 'object' + ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] + : undefined; + this._universeDomain = + (_c = (_b = (_a = opts === null || opts === void 0 ? void 0 : opts.universeDomain) !== null && _a !== void 0 ? _a : opts === null || opts === void 0 ? void 0 : opts.universe_domain) !== null && _b !== void 0 ? _b : universeDomainEnvVar) !== null && _c !== void 0 ? _c : 'googleapis.com'; + this._servicePath = 'firestore.' + this._universeDomain; + const servicePath = (opts === null || opts === void 0 ? void 0 : opts.servicePath) || (opts === null || opts === void 0 ? void 0 : opts.apiEndpoint) || this._servicePath; + this._providedCustomServicePath = !!((opts === null || opts === void 0 ? void 0 : opts.servicePath) || (opts === null || opts === void 0 ? void 0 : opts.apiEndpoint)); + const port = (opts === null || opts === void 0 ? void 0 : opts.port) || staticMembers.port; + const clientConfig = (_d = opts === null || opts === void 0 ? void 0 : opts.clientConfig) !== null && _d !== void 0 ? _d : {}; + const fallback = (_e = opts === null || opts === void 0 ? void 0 : opts.fallback) !== null && _e !== void 0 ? _e : (typeof window !== 'undefined' && typeof (window === null || window === void 0 ? void 0 : window.fetch) === 'function'); + opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts); + // Request numeric enum values if REST transport is used. + opts.numericEnums = true; + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== this._servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = __nccwpck_require__(12263); + } + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + // Save options to use in initialize() method. + this._opts = opts; + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth; + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = this._servicePath; + // Set the default scopes in auth client if needed. + if (servicePath === this._servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + this.locationsClient = new this._gaxModule.LocationsClient(this._gaxGrpc, opts); + // Determine the client header string. + const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process === 'object' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } + else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } + else { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listDocuments: new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'documents'), + partitionQuery: new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'partitions'), + listCollectionIds: new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'collectionIds'), + }; + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. + this.descriptors.stream = { + batchGetDocuments: new this._gaxModule.StreamDescriptor(this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, !!opts.gaxServerStreamingRetries), + runQuery: new this._gaxModule.StreamDescriptor(this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, !!opts.gaxServerStreamingRetries), + runAggregationQuery: new this._gaxModule.StreamDescriptor(this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, !!opts.gaxServerStreamingRetries), + write: new this._gaxModule.StreamDescriptor(this._gaxModule.StreamType.BIDI_STREAMING, !!opts.fallback, !!opts.gaxServerStreamingRetries), + listen: new this._gaxModule.StreamDescriptor(this._gaxModule.StreamType.BIDI_STREAMING, !!opts.fallback, !!opts.gaxServerStreamingRetries), + }; + // Put together the default options sent with requests. 
+ this._defaults = this._gaxGrpc.constructSettings('google.firestore.v1.Firestore', gapicConfig, opts.clientConfig || {}, { 'x-goog-api-client': clientHeader.join(' ') }); + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.firestoreStub) { + return this.firestoreStub; + } + // Put together the "service stub" for + // google.firestore.v1.Firestore. + this.firestoreStub = this._gaxGrpc.createStub(this._opts.fallback + ? this._protos.lookupService('google.firestore.v1.Firestore') + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + this._protos.google.firestore.v1.Firestore, this._opts, this._providedCustomServicePath); + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const firestoreStubMethods = [ + 'getDocument', + 'listDocuments', + 'updateDocument', + 'deleteDocument', + 'batchGetDocuments', + 'beginTransaction', + 'commit', + 'rollback', + 'runQuery', + 'runAggregationQuery', + 'partitionQuery', + 'write', + 'listen', + 'listCollectionIds', + 'batchWrite', + 'createDocument', + ]; + for (const methodName of firestoreStubMethods) { + const callPromise = this.firestoreStub.then(stub => (...args) => { + if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new stream_1.PassThrough(); + setImmediate(() => { + stream.emit('error', new this._gaxModule.GoogleError('The client has already been closed.')); + }); + return stream; + } + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, (err) => () => { + throw err; + }); + const descriptor = this.descriptors.page[methodName] || + this.descriptors.stream[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall(callPromise, this._defaults[methodName], descriptor, this._opts.fallback); + this.innerApiCalls[methodName] = apiCall; + } + return this.firestoreStub; + } + /** + * The DNS address for this API service. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + if (typeof process === 'object' && + typeof process.emitWarning === 'function') { + process.emitWarning('Static servicePath is deprecated, please use the instance method instead.', 'DeprecationWarning'); + } + return 'firestore.googleapis.com'; + } + /** + * The DNS address for this API service - same as servicePath. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. 
+ */ + static get apiEndpoint() { + if (typeof process === 'object' && + typeof process.emitWarning === 'function') { + process.emitWarning('Static apiEndpoint is deprecated, please use the instance method instead.', 'DeprecationWarning'); + } + return 'firestore.googleapis.com'; + } + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + get apiEndpoint() { + return this._servicePath; + } + get universeDomain() { + return this._universeDomain; + } + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', + ]; + } + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback) { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + getDocument(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.getDocument(request, options, callback); + } + updateDocument(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'document.name': (_a = request.document.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.updateDocument(request, options, callback); + } + deleteDocument(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? 
_a : '', + }); + this.initialize(); + return this.innerApiCalls.deleteDocument(request, options, callback); + } + beginTransaction(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.beginTransaction(request, options, callback); + } + commit(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.commit(request, options, callback); + } + rollback(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.rollback(request, options, callback); + } + batchWrite(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.batchWrite(request, options, callback); + } + createDocument(request, optionsOrCallback, callback) { + var _a, _b; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + collection_id: (_b = request.collectionId) !== null && _b !== void 0 ? 
_b : '', + }); + this.initialize(); + return this.innerApiCalls.createDocument(request, options, callback); + } + /** + * Gets multiple documents. + * + * Documents returned by this method are not guaranteed to be returned in the + * same order that they were requested. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.database + * Required. The database name. In the format: + * `projects/{project_id}/databases/{database_id}`. + * @param {string[]} request.documents + * The names of the documents to retrieve. In the format: + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * The request will fail if any of the document is not a child resource of the + * given `database`. Duplicate names will be elided. + * @param {google.firestore.v1.DocumentMask} request.mask + * The fields to return. If not set, returns all fields. + * + * If a document has a field that is not present in this mask, that field will + * not be returned in the response. + * @param {Buffer} request.transaction + * Reads documents in a transaction. + * @param {google.firestore.v1.TransactionOptions} request.newTransaction + * Starts a new transaction and reads the documents. + * Defaults to a read-only transaction. + * The new transaction ID will be returned as the first response in the + * stream. + * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. + * + * This must be a microsecond precision timestamp within the past one hour, + * or if Point-in-Time Recovery is enabled, can additionally be a whole + * minute timestamp within the past 7 days. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits {@link protos.google.firestore.v1.BatchGetDocumentsResponse|BatchGetDocumentsResponse} on 'data' event. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore.batch_get_documents.js + * region_tag:firestore_v1_generated_Firestore_BatchGetDocuments_async + */ + batchGetDocuments(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.batchGetDocuments(request, options); + } + /** + * Runs a query. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents` or + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * For example: + * `projects/my-project/databases/my-database/documents` or + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {google.firestore.v1.StructuredQuery} request.structuredQuery + * A structured query. + * @param {Buffer} request.transaction + * Run the query within an already active transaction. 
+ * + * The value here is the opaque transaction ID to execute the query in. + * @param {google.firestore.v1.TransactionOptions} request.newTransaction + * Starts a new transaction and reads the documents. + * Defaults to a read-only transaction. + * The new transaction ID will be returned as the first response in the + * stream. + * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. + * + * This must be a microsecond precision timestamp within the past one hour, + * or if Point-in-Time Recovery is enabled, can additionally be a whole + * minute timestamp within the past 7 days. + * @param {google.firestore.v1.ExplainOptions} [request.explainOptions] + * Optional. Explain options for the query. If set, additional query + * statistics will be returned. If not, only query results will be returned. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits {@link protos.google.firestore.v1.RunQueryResponse|RunQueryResponse} on 'data' event. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore.run_query.js + * region_tag:firestore_v1_generated_Firestore_RunQuery_async + */ + runQuery(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.runQuery(request, options); + } + /** + * Runs an aggregation query. + * + * Rather than producing {@link protos.google.firestore.v1.Document|Document} results like + * {@link protos.google.firestore.v1.Firestore.RunQuery|Firestore.RunQuery}, this API + * allows running an aggregation to produce a series of + * {@link protos.google.firestore.v1.AggregationResult|AggregationResult} server-side. + * + * High-Level Example: + * + * ``` + * -- Return the number of documents in table given a filter. + * SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); + * ``` + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents` or + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * For example: + * `projects/my-project/databases/my-database/documents` or + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {google.firestore.v1.StructuredAggregationQuery} request.structuredAggregationQuery + * An aggregation query. + * @param {Buffer} request.transaction + * Run the aggregation within an already active transaction. + * + * The value here is the opaque transaction ID to execute the query in. + * @param {google.firestore.v1.TransactionOptions} request.newTransaction + * Starts a new transaction as part of the query, defaulting to read-only. + * + * The new transaction ID will be returned as the first response in the + * stream. 
+ * @param {google.protobuf.Timestamp} request.readTime + * Executes the query at the given timestamp. + * + * This must be a microsecond precision timestamp within the past one hour, + * or if Point-in-Time Recovery is enabled, can additionally be a whole + * minute timestamp within the past 7 days. + * @param {google.firestore.v1.ExplainOptions} [request.explainOptions] + * Optional. Explain options for the query. If set, additional query + * statistics will be returned. If not, only query results will be returned. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits {@link protos.google.firestore.v1.RunAggregationQueryResponse|RunAggregationQueryResponse} on 'data' event. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore.run_aggregation_query.js + * region_tag:firestore_v1_generated_Firestore_RunAggregationQuery_async + */ + runAggregationQuery(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.runAggregationQuery(request, options); + } + /** + * Streams batches of document updates and deletes, in order. This method is + * only available via gRPC or WebChannel (not REST). + * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which is both readable and writable. It accepts objects + * representing {@link protos.google.firestore.v1.WriteRequest|WriteRequest} for write() method, and + * will emit objects representing {@link protos.google.firestore.v1.WriteResponse|WriteResponse} on 'data' event asynchronously. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore.write.js + * region_tag:firestore_v1_generated_Firestore_Write_async + */ + write(options) { + this.initialize(); + return this.innerApiCalls.write(null, options); + } + /** + * Listens to changes. This method is only available via gRPC or WebChannel + * (not REST). + * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which is both readable and writable. It accepts objects + * representing {@link protos.google.firestore.v1.ListenRequest|ListenRequest} for write() method, and + * will emit objects representing {@link protos.google.firestore.v1.ListenResponse|ListenResponse} on 'data' event asynchronously. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1/firestore.listen.js + * region_tag:firestore_v1_generated_Firestore_Listen_async + */ + listen(options) { + this.initialize(); + return this.innerApiCalls.listen(null, options); + } + listDocuments(request, optionsOrCallback, callback) { + var _a, _b; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + collection_id: (_b = request.collectionId) !== null && _b !== void 0 ? _b : '', + }); + this.initialize(); + return this.innerApiCalls.listDocuments(request, options, callback); + } + /** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents` or + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * + * For example: + * `projects/my-project/databases/my-database/documents` or + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {string} [request.collectionId] + * Optional. The collection ID, relative to `parent`, to list. + * + * For example: `chatrooms` or `messages`. + * + * This is optional, and when not provided, Firestore will list documents + * from all collections under the provided `parent`. + * @param {number} [request.pageSize] + * Optional. The maximum number of documents to return in a single response. + * + * Firestore may return fewer than this value. + * @param {string} [request.pageToken] + * Optional. A page token, received from a previous `ListDocuments` response. + * + * Provide this to retrieve the subsequent page. When paginating, all other + * parameters (with the exception of `page_size`) must match the values set + * in the request that generated the page token. + * @param {string} [request.orderBy] + * Optional. The optional ordering of the documents to return. + * + * For example: `priority desc, __name__ desc`. + * + * This mirrors the {@link protos.google.firestore.v1.StructuredQuery.order_by|`ORDER BY`} + * used in Firestore queries but in a string representation. When absent, + * documents are ordered based on `__name__ ASC`. + * @param {google.firestore.v1.DocumentMask} [request.mask] + * Optional. The fields to return. If not set, returns all fields. + * + * If a document has a field that is not present in this mask, that field + * will not be returned in the response. + * @param {Buffer} request.transaction + * Perform the read as part of an already active transaction. + * @param {google.protobuf.Timestamp} request.readTime + * Perform the read at the provided time. + * + * This must be a microsecond precision timestamp within the past one hour, + * or if Point-in-Time Recovery is enabled, can additionally be a whole + * minute timestamp within the past 7 days. + * @param {boolean} request.showMissing + * If the list should show missing documents. 
+ * + * A document is missing if it does not exist, but there are sub-documents + * nested underneath it. When true, such missing documents will be returned + * with a key but will not have fields, + * {@link protos.google.firestore.v1.Document.create_time|`create_time`}, or + * {@link protos.google.firestore.v1.Document.update_time|`update_time`} set. + * + * Requests with `show_missing` may not specify `where` or `order_by`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing {@link protos.google.firestore.v1.Document|Document} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listDocumentsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + */ + listDocumentsStream(request, options) { + var _a, _b; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + collection_id: (_b = request.collectionId) !== null && _b !== void 0 ? _b : '', + }); + const defaultCallSettings = this._defaults['listDocuments']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listDocuments.createStream(this.innerApiCalls.listDocuments, request, callSettings); + } + /** + * Equivalent to `listDocuments`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents` or + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * + * For example: + * `projects/my-project/databases/my-database/documents` or + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {string} [request.collectionId] + * Optional. The collection ID, relative to `parent`, to list. + * + * For example: `chatrooms` or `messages`. + * + * This is optional, and when not provided, Firestore will list documents + * from all collections under the provided `parent`. + * @param {number} [request.pageSize] + * Optional. The maximum number of documents to return in a single response. + * + * Firestore may return fewer than this value. + * @param {string} [request.pageToken] + * Optional. A page token, received from a previous `ListDocuments` response. + * + * Provide this to retrieve the subsequent page. When paginating, all other + * parameters (with the exception of `page_size`) must match the values set + * in the request that generated the page token. + * @param {string} [request.orderBy] + * Optional. The optional ordering of the documents to return. + * + * For example: `priority desc, __name__ desc`. 
+ * + * This mirrors the {@link protos.google.firestore.v1.StructuredQuery.order_by|`ORDER BY`} + * used in Firestore queries but in a string representation. When absent, + * documents are ordered based on `__name__ ASC`. + * @param {google.firestore.v1.DocumentMask} [request.mask] + * Optional. The fields to return. If not set, returns all fields. + * + * If a document has a field that is not present in this mask, that field + * will not be returned in the response. + * @param {Buffer} request.transaction + * Perform the read as part of an already active transaction. + * @param {google.protobuf.Timestamp} request.readTime + * Perform the read at the provided time. + * + * This must be a microsecond precision timestamp within the past one hour, + * or if Point-in-Time Recovery is enabled, can additionally be a whole + * minute timestamp within the past 7 days. + * @param {boolean} request.showMissing + * If the list should show missing documents. + * + * A document is missing if it does not exist, but there are sub-documents + * nested underneath it. When true, such missing documents will be returned + * with a key but will not have fields, + * {@link protos.google.firestore.v1.Document.create_time|`create_time`}, or + * {@link protos.google.firestore.v1.Document.update_time|`update_time`} set. + * + * Requests with `show_missing` may not specify `where` or `order_by`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. + * When you iterate the returned iterable, each element will be an object representing + * {@link protos.google.firestore.v1.Document|Document}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore.list_documents.js + * region_tag:firestore_v1_generated_Firestore_ListDocuments_async + */ + listDocumentsAsync(request, options) { + var _a, _b; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + collection_id: (_b = request.collectionId) !== null && _b !== void 0 ? 
_b : '', + }); + const defaultCallSettings = this._defaults['listDocuments']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listDocuments.asyncIterate(this.innerApiCalls['listDocuments'], request, callSettings); + } + partitionQuery(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.partitionQuery(request, options, callback); + } + /** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents`. + * Document resource names are not supported; only database resource names + * can be specified. + * @param {google.firestore.v1.StructuredQuery} request.structuredQuery + * A structured query. + * Query must specify collection with all descendants and be ordered by name + * ascending. Other filters, order bys, limits, offsets, and start/end + * cursors are not supported. + * @param {number} request.partitionCount + * The desired maximum number of partition points. + * The partitions may be returned across multiple pages of results. + * The number must be positive. The actual number of partitions + * returned may be fewer. + * + * For example, this may be set to one fewer than the number of parallel + * queries to be run, or in running a data pipeline job, one fewer than the + * number of workers or compute instances available. + * @param {string} request.pageToken + * The `next_page_token` value returned from a previous call to + * PartitionQuery that may be used to get an additional set of results. + * There are no ordering guarantees between sets of results. Thus, using + * multiple sets of results will require merging the different result sets. + * + * For example, two subsequent calls using a page_token may return: + * + * * cursor B, cursor M, cursor Q + * * cursor A, cursor U, cursor W + * + * To obtain a complete result set ordered with respect to the results of the + * query supplied to PartitionQuery, the results sets should be merged: + * cursor A, cursor B, cursor M, cursor Q, cursor U, cursor W + * @param {number} request.pageSize + * The maximum number of partitions to return in this call, subject to + * `partition_count`. + * + * For example, if `partition_count` = 10 and `page_size` = 8, the first call + * to PartitionQuery will return up to 8 partitions and a `next_page_token` + * if more results exist. A second call to PartitionQuery will return up to + * 2 partitions, to complete the total of 10 specified in `partition_count`. + * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. 
+ * + * This must be a microsecond precision timestamp within the past one hour, + * or if Point-in-Time Recovery is enabled, can additionally be a whole + * minute timestamp within the past 7 days. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing {@link protos.google.firestore.v1.Cursor|Cursor} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `partitionQueryAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + */ + partitionQueryStream(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['partitionQuery']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.partitionQuery.createStream(this.innerApiCalls.partitionQuery, request, callSettings); + } + /** + * Equivalent to `partitionQuery`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents`. + * Document resource names are not supported; only database resource names + * can be specified. + * @param {google.firestore.v1.StructuredQuery} request.structuredQuery + * A structured query. + * Query must specify collection with all descendants and be ordered by name + * ascending. Other filters, order bys, limits, offsets, and start/end + * cursors are not supported. + * @param {number} request.partitionCount + * The desired maximum number of partition points. + * The partitions may be returned across multiple pages of results. + * The number must be positive. The actual number of partitions + * returned may be fewer. + * + * For example, this may be set to one fewer than the number of parallel + * queries to be run, or in running a data pipeline job, one fewer than the + * number of workers or compute instances available. + * @param {string} request.pageToken + * The `next_page_token` value returned from a previous call to + * PartitionQuery that may be used to get an additional set of results. + * There are no ordering guarantees between sets of results. Thus, using + * multiple sets of results will require merging the different result sets. 
+ * + * For example, two subsequent calls using a page_token may return: + * + * * cursor B, cursor M, cursor Q + * * cursor A, cursor U, cursor W + * + * To obtain a complete result set ordered with respect to the results of the + * query supplied to PartitionQuery, the results sets should be merged: + * cursor A, cursor B, cursor M, cursor Q, cursor U, cursor W + * @param {number} request.pageSize + * The maximum number of partitions to return in this call, subject to + * `partition_count`. + * + * For example, if `partition_count` = 10 and `page_size` = 8, the first call + * to PartitionQuery will return up to 8 partitions and a `next_page_token` + * if more results exist. A second call to PartitionQuery will return up to + * 2 partitions, to complete the total of 10 specified in `partition_count`. + * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. + * + * This must be a microsecond precision timestamp within the past one hour, + * or if Point-in-Time Recovery is enabled, can additionally be a whole + * minute timestamp within the past 7 days. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. + * When you iterate the returned iterable, each element will be an object representing + * {@link protos.google.firestore.v1.Cursor|Cursor}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore.partition_query.js + * region_tag:firestore_v1_generated_Firestore_PartitionQuery_async + */ + partitionQueryAsync(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['partitionQuery']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.partitionQuery.asyncIterate(this.innerApiCalls['partitionQuery'], request, callSettings); + } + listCollectionIds(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.listCollectionIds(request, options, callback); + } + /** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. 
+ * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent document. In the format: + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * For example: + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {number} request.pageSize + * The maximum number of results to return. + * @param {string} request.pageToken + * A page token. Must be a value from + * {@link protos.google.firestore.v1.ListCollectionIdsResponse|ListCollectionIdsResponse}. + * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. + * + * This must be a microsecond precision timestamp within the past one hour, + * or if Point-in-Time Recovery is enabled, can additionally be a whole + * minute timestamp within the past 7 days. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing string on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listCollectionIdsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + */ + listCollectionIdsStream(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['listCollectionIds']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listCollectionIds.createStream(this.innerApiCalls.listCollectionIds, request, callSettings); + } + /** + * Equivalent to `listCollectionIds`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent document. In the format: + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * For example: + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {number} request.pageSize + * The maximum number of results to return. + * @param {string} request.pageToken + * A page token. Must be a value from + * {@link protos.google.firestore.v1.ListCollectionIdsResponse|ListCollectionIdsResponse}. + * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. + * + * This must be a microsecond precision timestamp within the past one hour, + * or if Point-in-Time Recovery is enabled, can additionally be a whole + * minute timestamp within the past 7 days. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. + * When you iterate the returned iterable, each element will be an object representing + * string. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + * @example include:samples/generated/v1/firestore.list_collection_ids.js + * region_tag:firestore_v1_generated_Firestore_ListCollectionIds_async + */ + listCollectionIdsAsync(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['listCollectionIds']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listCollectionIds.asyncIterate(this.innerApiCalls['listCollectionIds'], request, callSettings); + } + /** + * Gets information about a location. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Resource name for the location. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html | CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing {@link google.cloud.location.Location | Location}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. + * @example + * ``` + * const [response] = await client.getLocation(request); + * ``` + */ + getLocation(request, options, callback) { + return this.locationsClient.getLocation(request, options, callback); + } + /** + * Lists information about the supported locations for this service. Returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * The resource that owns the locations collection, if applicable. + * @param {string} request.filter + * The standard list filter. + * @param {number} request.pageSize + * The standard list page size. + * @param {string} request.pageToken + * The standard list page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. + * When you iterate the returned iterable, each element will be an object representing + * {@link google.cloud.location.Location | Location}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. 
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + * @example + * ``` + * const iterable = client.listLocationsAsync(request); + * for await (const response of iterable) { + * // process response + * } + * ``` + */ + listLocationsAsync(request, options) { + return this.locationsClient.listLocationsAsync(request, options); + } + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close() { + if (this.firestoreStub && !this._terminated) { + return this.firestoreStub.then(stub => { + this._terminated = true; + stub.close(); + this.locationsClient.close(); + }); + } + return Promise.resolve(); + } +} +exports.FirestoreClient = FirestoreClient; +//# sourceMappingURL=firestore_client.js.map + +/***/ }), + +/***/ 31644: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FirestoreAdminClient = exports.FirestoreClient = void 0; +const firestore_admin_client_1 = __nccwpck_require__(34184); +Object.defineProperty(exports, "FirestoreAdminClient", ({ enumerable: true, get: function () { return firestore_admin_client_1.FirestoreAdminClient; } })); +const firestore_client_1 = __nccwpck_require__(77454); +Object.defineProperty(exports, "FirestoreClient", ({ enumerable: true, get: function () { return firestore_client_1.FirestoreClient; } })); +// Doing something really horrible for reverse compatibility with original JavaScript exports +const existingExports = module.exports; +module.exports = firestore_client_1.FirestoreClient; +module.exports = Object.assign(module.exports, existingExports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 55485: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FirestoreClient = void 0; +const stream_1 = __nccwpck_require__(12781); +const jsonProtos = __nccwpck_require__(88689); +/** + * Client JSON configuration object, loaded from + * `src/v1beta1/firestore_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +const gapicConfig = __nccwpck_require__(10306); +// tslint:disable deprecation +const version = (__nccwpck_require__(49830)/* .version */ .i8); +/** + * The Cloud Firestore service. + * + * Cloud Firestore is a fast, fully managed, serverless, cloud-native NoSQL + * document database that simplifies storing, syncing, and querying data for + * your mobile, web, and IoT apps at global scale. Its client libraries provide + * live synchronization and offline support, while its security features and + * integrations with Firebase and Google Cloud Platform (GCP) accelerate + * building truly serverless apps. + * @class + * @deprecated Use v1/firestore_client instead. + * @memberof v1beta1 + */ +class FirestoreClient { + /** + * Construct an instance of FirestoreClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. 
Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new FirestoreClient({fallback: true}, gax); + * ``` + */ + constructor(opts, gaxInstance) { + var _a, _b, _c, _d, _e; + this._terminated = false; + this.descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + // Ensure that options include all the required fields. + const staticMembers = this.constructor; + if ((opts === null || opts === void 0 ? void 0 : opts.universe_domain) && + (opts === null || opts === void 0 ? void 0 : opts.universeDomain) && + (opts === null || opts === void 0 ? void 0 : opts.universe_domain) !== (opts === null || opts === void 0 ? void 0 : opts.universeDomain)) { + throw new Error('Please set either universe_domain or universeDomain, but not both.'); + } + const universeDomainEnvVar = typeof process === 'object' && typeof process.env === 'object' + ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] + : undefined; + this._universeDomain = + (_c = (_b = (_a = opts === null || opts === void 0 ? void 0 : opts.universeDomain) !== null && _a !== void 0 ? _a : opts === null || opts === void 0 ? void 0 : opts.universe_domain) !== null && _b !== void 0 ? _b : universeDomainEnvVar) !== null && _c !== void 0 ? _c : 'googleapis.com'; + this._servicePath = 'firestore.' + this._universeDomain; + const servicePath = (opts === null || opts === void 0 ? void 0 : opts.servicePath) || (opts === null || opts === void 0 ? void 0 : opts.apiEndpoint) || this._servicePath; + this._providedCustomServicePath = !!((opts === null || opts === void 0 ? void 0 : opts.servicePath) || (opts === null || opts === void 0 ? void 0 : opts.apiEndpoint)); + const port = (opts === null || opts === void 0 ? void 0 : opts.port) || staticMembers.port; + const clientConfig = (_d = opts === null || opts === void 0 ? void 0 : opts.clientConfig) !== null && _d !== void 0 ? _d : {}; + const fallback = (_e = opts === null || opts === void 0 ? void 0 : opts.fallback) !== null && _e !== void 0 ? _e : (typeof window !== 'undefined' && typeof (window === null || window === void 0 ? void 0 : window.fetch) === 'function'); + opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts); + // Request numeric enum values if REST transport is used. + opts.numericEnums = true; + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== this._servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = __nccwpck_require__(12263); + } + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + // Save options to use in initialize() method. + this._opts = opts; + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth; + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = this._servicePath; + // Set the default scopes in auth client if needed. 
+ if (servicePath === this._servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + // Determine the client header string. + const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process === 'object' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } + else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } + else { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listDocuments: new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'documents'), + partitionQuery: new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'partitions'), + listCollectionIds: new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'collectionIds'), + }; + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. + this.descriptors.stream = { + batchGetDocuments: new this._gaxModule.StreamDescriptor(this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, !!opts.gaxServerStreamingRetries), + runQuery: new this._gaxModule.StreamDescriptor(this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, !!opts.gaxServerStreamingRetries), + write: new this._gaxModule.StreamDescriptor(this._gaxModule.StreamType.BIDI_STREAMING, !!opts.fallback, !!opts.gaxServerStreamingRetries), + listen: new this._gaxModule.StreamDescriptor(this._gaxModule.StreamType.BIDI_STREAMING, !!opts.fallback, !!opts.gaxServerStreamingRetries), + }; + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings('google.firestore.v1beta1.Firestore', gapicConfig, opts.clientConfig || {}, { 'x-goog-api-client': clientHeader.join(' ') }); + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.firestoreStub) { + return this.firestoreStub; + } + // Put together the "service stub" for + // google.firestore.v1beta1.Firestore. + this.firestoreStub = this._gaxGrpc.createStub(this._opts.fallback + ? 
this._protos.lookupService('google.firestore.v1beta1.Firestore') + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + this._protos.google.firestore.v1beta1.Firestore, this._opts, this._providedCustomServicePath); + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const firestoreStubMethods = [ + 'getDocument', + 'listDocuments', + 'updateDocument', + 'deleteDocument', + 'batchGetDocuments', + 'beginTransaction', + 'commit', + 'rollback', + 'runQuery', + 'partitionQuery', + 'write', + 'listen', + 'listCollectionIds', + 'batchWrite', + 'createDocument', + ]; + for (const methodName of firestoreStubMethods) { + const callPromise = this.firestoreStub.then(stub => (...args) => { + if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new stream_1.PassThrough(); + setImmediate(() => { + stream.emit('error', new this._gaxModule.GoogleError('The client has already been closed.')); + }); + return stream; + } + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, (err) => () => { + throw err; + }); + const descriptor = this.descriptors.page[methodName] || + this.descriptors.stream[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall(callPromise, this._defaults[methodName], descriptor, this._opts.fallback); + this.innerApiCalls[methodName] = apiCall; + } + return this.firestoreStub; + } + /** + * The DNS address for this API service. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + if (typeof process === 'object' && + typeof process.emitWarning === 'function') { + process.emitWarning('Static servicePath is deprecated, please use the instance method instead.', 'DeprecationWarning'); + } + return 'firestore.googleapis.com'; + } + /** + * The DNS address for this API service - same as servicePath. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + if (typeof process === 'object' && + typeof process.emitWarning === 'function') { + process.emitWarning('Static apiEndpoint is deprecated, please use the instance method instead.', 'DeprecationWarning'); + } + return 'firestore.googleapis.com'; + } + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + get apiEndpoint() { + return this._servicePath; + } + get universeDomain() { + return this._universeDomain; + } + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', + ]; + } + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. 
+ */ + getProjectId(callback) { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + getDocument(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.getDocument(request, options, callback); + } + updateDocument(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'document.name': (_a = request.document.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.updateDocument(request, options, callback); + } + deleteDocument(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: (_a = request.name) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.deleteDocument(request, options, callback); + } + beginTransaction(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.beginTransaction(request, options, callback); + } + commit(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? 
_a : '', + }); + this.initialize(); + return this.innerApiCalls.commit(request, options, callback); + } + rollback(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.rollback(request, options, callback); + } + batchWrite(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.batchWrite(request, options, callback); + } + createDocument(request, optionsOrCallback, callback) { + var _a, _b; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + collection_id: (_b = request.collectionId) !== null && _b !== void 0 ? _b : '', + }); + this.initialize(); + return this.innerApiCalls.createDocument(request, options, callback); + } + /** + * Gets multiple documents. + * + * Documents returned by this method are not guaranteed to be returned in the + * same order that they were requested. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.database + * Required. The database name. In the format: + * `projects/{project_id}/databases/{database_id}`. + * @param {string[]} request.documents + * The names of the documents to retrieve. In the format: + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * The request will fail if any of the document is not a child resource of the + * given `database`. Duplicate names will be elided. + * @param {google.firestore.v1beta1.DocumentMask} request.mask + * The fields to return. If not set, returns all fields. + * + * If a document has a field that is not present in this mask, that field will + * not be returned in the response. + * @param {Buffer} request.transaction + * Reads documents in a transaction. + * @param {google.firestore.v1beta1.TransactionOptions} request.newTransaction + * Starts a new transaction and reads the documents. + * Defaults to a read-only transaction. + * The new transaction ID will be returned as the first response in the + * stream. 
+ * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. + * This may not be older than 270 seconds. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits {@link protos.google.firestore.v1beta1.BatchGetDocumentsResponse|BatchGetDocumentsResponse} on 'data' event. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming | documentation } + * for more details and examples. + * @example include:samples/generated/v1beta1/firestore.batch_get_documents.js + * region_tag:firestore_v1beta1_generated_Firestore_BatchGetDocuments_async + */ + batchGetDocuments(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + database: (_a = request.database) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.batchGetDocuments(request, options); + } + /** + * Runs a query. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents` or + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * For example: + * `projects/my-project/databases/my-database/documents` or + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {google.firestore.v1beta1.StructuredQuery} request.structuredQuery + * A structured query. + * @param {Buffer} request.transaction + * Reads documents in a transaction. + * @param {google.firestore.v1beta1.TransactionOptions} request.newTransaction + * Starts a new transaction and reads the documents. + * Defaults to a read-only transaction. + * The new transaction ID will be returned as the first response in the + * stream. + * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. + * This may not be older than 270 seconds. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits {@link protos.google.firestore.v1beta1.RunQueryResponse|RunQueryResponse} on 'data' event. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming | documentation } + * for more details and examples. + * @example include:samples/generated/v1beta1/firestore.run_query.js + * region_tag:firestore_v1beta1_generated_Firestore_RunQuery_async + */ + runQuery(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.runQuery(request, options); + } + /** + * Streams batches of document updates and deletes, in order. 
+ * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which is both readable and writable. It accepts objects + * representing {@link protos.google.firestore.v1beta1.WriteRequest|WriteRequest} for write() method, and + * will emit objects representing {@link protos.google.firestore.v1beta1.WriteResponse|WriteResponse} on 'data' event asynchronously. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming | documentation } + * for more details and examples. + * @example include:samples/generated/v1beta1/firestore.write.js + * region_tag:firestore_v1beta1_generated_Firestore_Write_async + */ + write(options) { + this.initialize(); + return this.innerApiCalls.write(null, options); + } + /** + * Listens to changes. + * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which is both readable and writable. It accepts objects + * representing {@link protos.google.firestore.v1beta1.ListenRequest|ListenRequest} for write() method, and + * will emit objects representing {@link protos.google.firestore.v1beta1.ListenResponse|ListenResponse} on 'data' event asynchronously. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming | documentation } + * for more details and examples. + * @example include:samples/generated/v1beta1/firestore.listen.js + * region_tag:firestore_v1beta1_generated_Firestore_Listen_async + */ + listen(options) { + this.initialize(); + return this.innerApiCalls.listen(null, options); + } + listDocuments(request, optionsOrCallback, callback) { + var _a, _b; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + collection_id: (_b = request.collectionId) !== null && _b !== void 0 ? _b : '', + }); + this.initialize(); + return this.innerApiCalls.listDocuments(request, options, callback); + } + /** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents` or + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * For example: + * `projects/my-project/databases/my-database/documents` or + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {string} request.collectionId + * Required. The collection ID, relative to `parent`, to list. For example: `chatrooms` + * or `messages`. + * @param {number} request.pageSize + * The maximum number of documents to return. + * @param {string} request.pageToken + * The `next_page_token` value returned from a previous List request, if any. 
+ * @param {string} request.orderBy + * The order to sort results by. For example: `priority desc, name`. + * @param {google.firestore.v1beta1.DocumentMask} request.mask + * The fields to return. If not set, returns all fields. + * + * If a document has a field that is not present in this mask, that field + * will not be returned in the response. + * @param {Buffer} request.transaction + * Reads documents in a transaction. + * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. + * This may not be older than 270 seconds. + * @param {boolean} request.showMissing + * If the list should show missing documents. A missing document is a + * document that does not exist but has sub-documents. These documents will + * be returned with a key but will not have fields, {@link protos.google.firestore.v1beta1.Document.create_time|Document.create_time}, + * or {@link protos.google.firestore.v1beta1.Document.update_time|Document.update_time} set. + * + * Requests with `show_missing` may not specify `where` or + * `order_by`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing {@link protos.google.firestore.v1beta1.Document|Document} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listDocumentsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + */ + listDocumentsStream(request, options) { + var _a, _b; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + collection_id: (_b = request.collectionId) !== null && _b !== void 0 ? _b : '', + }); + const defaultCallSettings = this._defaults['listDocuments']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listDocuments.createStream(this.innerApiCalls.listDocuments, request, callSettings); + } + /** + * Equivalent to `listDocuments`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents` or + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * For example: + * `projects/my-project/databases/my-database/documents` or + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {string} request.collectionId + * Required. The collection ID, relative to `parent`, to list. For example: `chatrooms` + * or `messages`. + * @param {number} request.pageSize + * The maximum number of documents to return. 
+ * @param {string} request.pageToken + * The `next_page_token` value returned from a previous List request, if any. + * @param {string} request.orderBy + * The order to sort results by. For example: `priority desc, name`. + * @param {google.firestore.v1beta1.DocumentMask} request.mask + * The fields to return. If not set, returns all fields. + * + * If a document has a field that is not present in this mask, that field + * will not be returned in the response. + * @param {Buffer} request.transaction + * Reads documents in a transaction. + * @param {google.protobuf.Timestamp} request.readTime + * Reads documents as they were at the given time. + * This may not be older than 270 seconds. + * @param {boolean} request.showMissing + * If the list should show missing documents. A missing document is a + * document that does not exist but has sub-documents. These documents will + * be returned with a key but will not have fields, {@link protos.google.firestore.v1beta1.Document.create_time|Document.create_time}, + * or {@link protos.google.firestore.v1beta1.Document.update_time|Document.update_time} set. + * + * Requests with `show_missing` may not specify `where` or + * `order_by`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. + * When you iterate the returned iterable, each element will be an object representing + * {@link protos.google.firestore.v1beta1.Document|Document}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + * @example include:samples/generated/v1beta1/firestore.list_documents.js + * region_tag:firestore_v1beta1_generated_Firestore_ListDocuments_async + */ + listDocumentsAsync(request, options) { + var _a, _b; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + collection_id: (_b = request.collectionId) !== null && _b !== void 0 ? _b : '', + }); + const defaultCallSettings = this._defaults['listDocuments']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listDocuments.asyncIterate(this.innerApiCalls['listDocuments'], request, callSettings); + } + partitionQuery(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? 
_a : '', + }); + this.initialize(); + return this.innerApiCalls.partitionQuery(request, options, callback); + } + /** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents`. + * Document resource names are not supported; only database resource names + * can be specified. + * @param {google.firestore.v1beta1.StructuredQuery} request.structuredQuery + * A structured query. + * Query must specify collection with all descendants and be ordered by name + * ascending. Other filters, order bys, limits, offsets, and start/end + * cursors are not supported. + * @param {number} request.partitionCount + * The desired maximum number of partition points. + * The partitions may be returned across multiple pages of results. + * The number must be positive. The actual number of partitions + * returned may be fewer. + * + * For example, this may be set to one fewer than the number of parallel + * queries to be run, or in running a data pipeline job, one fewer than the + * number of workers or compute instances available. + * @param {string} request.pageToken + * The `next_page_token` value returned from a previous call to + * PartitionQuery that may be used to get an additional set of results. + * There are no ordering guarantees between sets of results. Thus, using + * multiple sets of results will require merging the different result sets. + * + * For example, two subsequent calls using a page_token may return: + * + * * cursor B, cursor M, cursor Q + * * cursor A, cursor U, cursor W + * + * To obtain a complete result set ordered with respect to the results of the + * query supplied to PartitionQuery, the results sets should be merged: + * cursor A, cursor B, cursor M, cursor Q, cursor U, cursor W + * @param {number} request.pageSize + * The maximum number of partitions to return in this call, subject to + * `partition_count`. + * + * For example, if `partition_count` = 10 and `page_size` = 8, the first call + * to PartitionQuery will return up to 8 partitions and a `next_page_token` + * if more results exist. A second call to PartitionQuery will return up to + * 2 partitions, to complete the total of 10 specified in `partition_count`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing {@link protos.google.firestore.v1beta1.Cursor|Cursor} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `partitionQueryAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + */ + partitionQueryStream(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? 
_a : '', + }); + const defaultCallSettings = this._defaults['partitionQuery']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.partitionQuery.createStream(this.innerApiCalls.partitionQuery, request, callSettings); + } + /** + * Equivalent to `partitionQuery`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource name. In the format: + * `projects/{project_id}/databases/{database_id}/documents`. + * Document resource names are not supported; only database resource names + * can be specified. + * @param {google.firestore.v1beta1.StructuredQuery} request.structuredQuery + * A structured query. + * Query must specify collection with all descendants and be ordered by name + * ascending. Other filters, order bys, limits, offsets, and start/end + * cursors are not supported. + * @param {number} request.partitionCount + * The desired maximum number of partition points. + * The partitions may be returned across multiple pages of results. + * The number must be positive. The actual number of partitions + * returned may be fewer. + * + * For example, this may be set to one fewer than the number of parallel + * queries to be run, or in running a data pipeline job, one fewer than the + * number of workers or compute instances available. + * @param {string} request.pageToken + * The `next_page_token` value returned from a previous call to + * PartitionQuery that may be used to get an additional set of results. + * There are no ordering guarantees between sets of results. Thus, using + * multiple sets of results will require merging the different result sets. + * + * For example, two subsequent calls using a page_token may return: + * + * * cursor B, cursor M, cursor Q + * * cursor A, cursor U, cursor W + * + * To obtain a complete result set ordered with respect to the results of the + * query supplied to PartitionQuery, the results sets should be merged: + * cursor A, cursor B, cursor M, cursor Q, cursor U, cursor W + * @param {number} request.pageSize + * The maximum number of partitions to return in this call, subject to + * `partition_count`. + * + * For example, if `partition_count` = 10 and `page_size` = 8, the first call + * to PartitionQuery will return up to 8 partitions and a `next_page_token` + * if more results exist. A second call to PartitionQuery will return up to + * 2 partitions, to complete the total of 10 specified in `partition_count`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. + * When you iterate the returned iterable, each element will be an object representing + * {@link protos.google.firestore.v1beta1.Cursor|Cursor}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1beta1/firestore.partition_query.js + * region_tag:firestore_v1beta1_generated_Firestore_PartitionQuery_async + */ + partitionQueryAsync(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['partitionQuery']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.partitionQuery.asyncIterate(this.innerApiCalls['partitionQuery'], request, callSettings); + } + listCollectionIds(request, optionsOrCallback, callback) { + var _a; + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + this.initialize(); + return this.innerApiCalls.listCollectionIds(request, options, callback); + } + /** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent document. In the format: + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * For example: + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {number} request.pageSize + * The maximum number of results to return. + * @param {string} request.pageToken + * A page token. Must be a value from + * {@link protos.google.firestore.v1beta1.ListCollectionIdsResponse|ListCollectionIdsResponse}. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing string on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listCollectionIdsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + */ + listCollectionIdsStream(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? 
_a : '', + }); + const defaultCallSettings = this._defaults['listCollectionIds']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listCollectionIds.createStream(this.innerApiCalls.listCollectionIds, request, callSettings); + } + /** + * Equivalent to `listCollectionIds`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent document. In the format: + * `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + * For example: + * `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + * @param {number} request.pageSize + * The maximum number of results to return. + * @param {string} request.pageToken + * A page token. Must be a value from + * {@link protos.google.firestore.v1beta1.ListCollectionIdsResponse|ListCollectionIdsResponse}. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | async iteration }. + * When you iterate the returned iterable, each element will be an object representing + * string. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination | documentation } + * for more details and examples. + * @example include:samples/generated/v1beta1/firestore.list_collection_ids.js + * region_tag:firestore_v1beta1_generated_Firestore_ListCollectionIds_async + */ + listCollectionIdsAsync(request, options) { + var _a; + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: (_a = request.parent) !== null && _a !== void 0 ? _a : '', + }); + const defaultCallSettings = this._defaults['listCollectionIds']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listCollectionIds.asyncIterate(this.innerApiCalls['listCollectionIds'], request, callSettings); + } + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close() { + if (this.firestoreStub && !this._terminated) { + return this.firestoreStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} +exports.FirestoreClient = FirestoreClient; +//# sourceMappingURL=firestore_client.js.map + +/***/ }), + +/***/ 92170: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FirestoreClient = void 0; +// tslint:disable deprecation +const firestore_client_1 = __nccwpck_require__(55485); +Object.defineProperty(exports, "FirestoreClient", ({ enumerable: true, get: function () { return firestore_client_1.FirestoreClient; } })); +// Doing something really horrible for reverse compatibility with original JavaScript exports +const existingExports = module.exports; +module.exports = firestore_client_1.FirestoreClient; +module.exports = Object.assign(module.exports, existingExports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 33822: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2017 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.customObjectMessage = customObjectMessage; +exports.validateFunction = validateFunction; +exports.validateObject = validateObject; +exports.validateString = validateString; +exports.validateHost = validateHost; +exports.validateBoolean = validateBoolean; +exports.validateNumber = validateNumber; +exports.validateInteger = validateInteger; +exports.validateTimestamp = validateTimestamp; +exports.invalidArgumentMessage = invalidArgumentMessage; +exports.validateOptional = validateOptional; +exports.validateMinNumberOfArguments = validateMinNumberOfArguments; +exports.validateMaxNumberOfArguments = validateMaxNumberOfArguments; +exports.validateEnumValue = validateEnumValue; +const url_1 = __nccwpck_require__(57310); +const util_1 = __nccwpck_require__(15468); +const timestamp_1 = __nccwpck_require__(29061); +/** + * Generates an error message to use with custom objects that cannot be + * serialized. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The value that failed serialization. + * @param path The field path that the object is assigned to. + */ +function customObjectMessage(arg, value, path) { + const fieldPathMessage = path ? ` (found in field "${path}")` : ''; + if ((0, util_1.isObject)(value)) { + // We use the base class name as the type name as the sentinel classes + // returned by the public FieldValue API are subclasses of FieldValue. 
By + // using the base name, we reduce the number of special cases below. + const typeName = value.constructor.name; + switch (typeName) { + case 'DocumentReference': + case 'FieldPath': + case 'FieldValue': + case 'GeoPoint': + case 'Timestamp': + return (`${invalidArgumentMessage(arg, 'Firestore document')} Detected an object of type "${typeName}" that doesn't match the ` + + `expected instance${fieldPathMessage}. Please ensure that the ` + + 'Firestore types you are using are from the same NPM package.)'); + case 'Object': + return `${invalidArgumentMessage(arg, 'Firestore document')} Invalid use of type "${typeof value}" as a Firestore argument${fieldPathMessage}.`; + default: + return (`${invalidArgumentMessage(arg, 'Firestore document')} Couldn't serialize object of type "${typeName}"${fieldPathMessage}. Firestore doesn't support JavaScript ` + + 'objects with custom prototypes (i.e. objects that were created ' + + 'via the "new" operator).'); + } + } + else { + return `${invalidArgumentMessage(arg, 'Firestore document')} Input is not a plain JavaScript object${fieldPathMessage}.`; + } +} +/** + * Validates that 'value' is a function. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + * @param options Options that specify whether the function can be omitted. + */ +function validateFunction(arg, value, options) { + if (!validateOptional(value, options)) { + if (!(0, util_1.isFunction)(value)) { + throw new Error(invalidArgumentMessage(arg, 'function')); + } + } +} +/** + * Validates that 'value' is an object. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + * @param options Options that specify whether the object can be omitted. + */ +function validateObject(arg, value, options) { + if (!validateOptional(value, options)) { + if (!(0, util_1.isObject)(value)) { + throw new Error(invalidArgumentMessage(arg, 'object')); + } + } +} +/** + * Validates that 'value' is a string. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + * @param options Options that specify whether the string can be omitted. + */ +function validateString(arg, value, options) { + if (!validateOptional(value, options)) { + if (typeof value !== 'string') { + throw new Error(invalidArgumentMessage(arg, 'string')); + } + } +} +/** + * Validates that 'value' is a host. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + * @param options Options that specify whether the host can be omitted. + */ +function validateHost(arg, value, options) { + if (!validateOptional(value, options)) { + validateString(arg, value); + const urlString = `http://${value}/`; + let parsed; + try { + parsed = new url_1.URL(urlString); + } + catch (e) { + throw new Error(invalidArgumentMessage(arg, 'host')); + } + if (parsed.search !== '' || + parsed.pathname !== '/' || + parsed.username !== '') { + throw new Error(invalidArgumentMessage(arg, 'host')); + } + } +} +/** + * Validates that 'value' is a boolean. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + * @param options Options that specify whether the boolean can be omitted. 
+ */ +function validateBoolean(arg, value, options) { + if (!validateOptional(value, options)) { + if (typeof value !== 'boolean') { + throw new Error(invalidArgumentMessage(arg, 'boolean')); + } + } +} +/** + * Validates that 'value' is a number. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + * @param options Options that specify whether the number can be omitted. + */ +function validateNumber(arg, value, options) { + const min = options !== undefined && options.minValue !== undefined + ? options.minValue + : -Infinity; + const max = options !== undefined && options.maxValue !== undefined + ? options.maxValue + : Infinity; + if (!validateOptional(value, options)) { + if (typeof value !== 'number' || isNaN(value)) { + throw new Error(invalidArgumentMessage(arg, 'number')); + } + else if (value < min || value > max) { + throw new Error(`${formatArgumentName(arg)} must be within [${min}, ${max}] inclusive, but was: ${value}`); + } + } +} +/** + * Validates that 'value' is a integer. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + * @param options Options that specify whether the integer can be omitted. + */ +function validateInteger(arg, value, options) { + const min = options !== undefined && options.minValue !== undefined + ? options.minValue + : -Infinity; + const max = options !== undefined && options.maxValue !== undefined + ? options.maxValue + : Infinity; + if (!validateOptional(value, options)) { + if (typeof value !== 'number' || isNaN(value) || value % 1 !== 0) { + throw new Error(invalidArgumentMessage(arg, 'integer')); + } + else if (value < min || value > max) { + throw new Error(`${formatArgumentName(arg)} must be within [${min}, ${max}] inclusive, but was: ${value}`); + } + } +} +/** + * Validates that 'value' is a Timestamp. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The input to validate. + * @param options Options that specify whether the Timestamp can be omitted. + */ +function validateTimestamp(arg, value, options) { + if (!validateOptional(value, options)) { + if (!(value instanceof timestamp_1.Timestamp)) { + throw new Error(invalidArgumentMessage(arg, 'Timestamp')); + } + } +} +/** + * Generates an error message to use with invalid arguments. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param expectedType The expected input type. + */ +function invalidArgumentMessage(arg, expectedType) { + return `${formatArgumentName(arg)} is not a valid ${expectedType}.`; +} +/** + * Enforces the 'options.optional' constraint for 'value'. + * + * @private + * @internal + * @param value The input to validate. + * @param options Whether the function can be omitted. + * @return Whether the object is omitted and is allowed to be omitted. + */ +function validateOptional(value, options) { + return (value === undefined && options !== undefined && options.optional === true); +} +/** + * Formats the given word as plural conditionally given the preceding number. + * + * @private + * @internal + * @param num The number to use for formatting. + * @param str The string to format. + */ +function formatPlural(num, str) { + return `${num} ${str}` + (num === 1 ? '' : 's'); +} +/** + * Creates a descriptive name for the provided argument name or index. 
+ * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @return Either the argument name or its index description. + */ +function formatArgumentName(arg) { + return typeof arg === 'string' + ? `Value for argument "${arg}"` + : `Element at index ${arg}`; +} +/** + * Verifies that 'args' has at least 'minSize' elements. + * + * @private + * @internal + * @param funcName The function name to use in the error message. + * @param args The array (or array-like structure) to verify. + * @param minSize The minimum number of elements to enforce. + * @throws if the expectation is not met. + */ +function validateMinNumberOfArguments(funcName, args, minSize) { + if (args.length < minSize) { + throw new Error(`Function "${funcName}()" requires at least ` + + `${formatPlural(minSize, 'argument')}.`); + } +} +/** + * Verifies that 'args' has at most 'maxSize' elements. + * + * @private + * @internal + * @param funcName The function name to use in the error message. + * @param args The array (or array-like structure) to verify. + * @param maxSize The maximum number of elements to enforce. + * @throws if the expectation is not met. + */ +function validateMaxNumberOfArguments(funcName, args, maxSize) { + if (args.length > maxSize) { + throw new Error(`Function "${funcName}()" accepts at most ` + + `${formatPlural(maxSize, 'argument')}.`); + } +} +/** + * Validates that the provided named option equals one of the expected values. + * + * @param arg The argument name or argument index (for varargs methods).). + * @param value The input to validate. + * @param allowedValues A list of expected values. + * @param options Whether the input can be omitted. + * @private + * @internal + */ +function validateEnumValue(arg, value, allowedValues, options) { + if (!validateOptional(value, options)) { + const expectedDescription = []; + for (const allowed of allowedValues) { + if (allowed === value) { + return; + } + expectedDescription.push(allowed); + } + throw new Error(`${formatArgumentName(arg)} is invalid. Acceptable values are: ${expectedDescription.join(', ')}`); + } +} +//# sourceMappingURL=validate.js.map + +/***/ }), + +/***/ 97462: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +var __webpack_unused_export__; + +/*! + * Copyright 2017 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +__webpack_unused_export__ = ({ value: true }); +exports.mI = exports.i9 = exports.mc = void 0; +const assert = __nccwpck_require__(39491); +const rbtree = __nccwpck_require__(38699); +const google_gax_1 = __nccwpck_require__(12263); +const backoff_1 = __nccwpck_require__(63544); +const document_1 = __nccwpck_require__(98912); +const document_change_1 = __nccwpck_require__(62270); +const logger_1 = __nccwpck_require__(42718); +const path_1 = __nccwpck_require__(34908); +const timestamp_1 = __nccwpck_require__(29061); +const types_1 = __nccwpck_require__(75371); +const util_1 = __nccwpck_require__(15468); +/*! + * Target ID used by watch. Watch uses a fixed target id since we only support + * one target per stream. + * @type {number} + */ +const WATCH_TARGET_ID = 0x1; +/*! + * Idle timeout used to detect Watch streams that stall (see + * https://github.com/googleapis/nodejs-firestore/issues/1057, b/156308554). + * Under normal load, the Watch backend will send a TARGET_CHANGE message + * roughly every 30 seconds. As discussed with the backend team, we reset the + * Watch stream if we do not receive any message within 120 seconds. + */ +exports.mc = 120 * 1000; +/*! + * Sentinel value for a document remove. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const REMOVED = {}; +/*! + * The change type for document change events. + */ +// tslint:disable-next-line:variable-name +const ChangeType = { + added: 'added', + modified: 'modified', + removed: 'removed', +}; +/*! + * The comparator used for document watches (which should always get called with + * the same document). + */ +const DOCUMENT_WATCH_COMPARATOR = (doc1, doc2) => { + assert(doc1 === doc2, 'Document watches only support one document.'); + return 0; +}; +const EMPTY_FUNCTION = () => { }; +/** + * Watch provides listen functionality and exposes the 'onSnapshot' observer. It + * can be used with a valid Firestore Listen target. + * + * @class + * @private + * @internal + */ +class Watch { + /** + * @private + * @internal + * + * @param firestore The Firestore Database client. + */ + constructor(firestore, _converter = (0, types_1.defaultConverter)()) { + this._converter = _converter; + /** + * Indicates whether we are interested in data from the stream. Set to false in the + * 'unsubscribe()' callback. + * @private + * @internal + */ + this.isActive = true; + /** + * The current stream to the backend. + * @private + * @internal + */ + this.currentStream = null; + /** + * The server assigns and updates the resume token. + * @private + * @internal + */ + this.resumeToken = undefined; + /** + * A map of document names to QueryDocumentSnapshots for the last sent snapshot. + * @private + * @internal + */ + this.docMap = new Map(); + /** + * The accumulated map of document changes (keyed by document name) for the + * current snapshot. + * @private + * @internal + */ + this.changeMap = new Map(); + /** + * The current state of the query results. * + * @private + * @internal + */ + this.current = false; + /** + * We need this to track whether we've pushed an initial set of changes, + * since we should push those even when there are no changes, if there + * aren't docs. + * @private + * @internal + */ + this.hasPushed = false; + this.firestore = firestore; + this.backoff = new backoff_1.ExponentialBackoff(); + this.requestTag = (0, util_1.requestTag)(); + this.onNext = EMPTY_FUNCTION; + this.onError = EMPTY_FUNCTION; + } + /** + * Starts a watch and attaches a listener for document change events. 
+ * + * @private + * @internal + * @param onNext A callback to be called every time a new snapshot is + * available. + * @param onError A callback to be called if the listen fails or is cancelled. + * No further callbacks will occur. + * + * @returns An unsubscribe function that can be called to cancel the snapshot + * listener. + */ + onSnapshot(onNext, onError) { + assert(this.onNext === EMPTY_FUNCTION, 'onNext should not already be defined.'); + assert(this.onError === EMPTY_FUNCTION, 'onError should not already be defined.'); + assert(this.docTree === undefined, 'docTree should not already be defined.'); + this.onNext = onNext; + this.onError = onError; + this.docTree = rbtree(this.getComparator()); + this.initStream(); + const unsubscribe = () => { + (0, logger_1.logger)('Watch.onSnapshot', this.requestTag, 'Unsubscribe called'); + // Prevent further callbacks. + this.onNext = () => { }; + this.onError = () => { }; + this.shutdown(); + }; + this.firestore.registerListener(); + return unsubscribe; + } + /** + * Returns the current count of all documents, including the changes from + * the current changeMap. + * @private + * @internal + */ + currentSize() { + const changes = this.extractCurrentChanges(timestamp_1.Timestamp.now()); + return this.docMap.size + changes.adds.length - changes.deletes.length; + } + /** + * Splits up document changes into removals, additions, and updates. + * @private + * @internal + */ + extractCurrentChanges(readTime) { + const deletes = []; + const adds = []; + const updates = []; + this.changeMap.forEach((value, name) => { + if (value === REMOVED) { + if (this.docMap.has(name)) { + deletes.push(name); + } + } + else if (this.docMap.has(name)) { + value.readTime = readTime; + updates.push(value.build()); + } + else { + value.readTime = readTime; + adds.push(value.build()); + } + }); + return { deletes, adds, updates }; + } + /** + * Helper to clear the docs on RESET or filter mismatch. + * @private + * @internal + */ + resetDocs() { + (0, logger_1.logger)('Watch.resetDocs', this.requestTag, 'Resetting documents'); + this.changeMap.clear(); + this.resumeToken = undefined; + this.docTree.forEach((snapshot) => { + // Mark each document as deleted. If documents are not deleted, they + // will be send again by the server. + this.changeMap.set(snapshot.ref.path, REMOVED); + }); + this.current = false; + } + /** + * Closes the stream and calls onError() if the stream is still active. + * @private + * @internal + */ + closeStream(err) { + if (this.isActive) { + (0, logger_1.logger)('Watch.closeStream', this.requestTag, 'Invoking onError: ', err); + this.onError(err); + } + this.shutdown(); + } + /** + * Re-opens the stream unless the specified error is considered permanent. + * Clears the change map. + * @private + * @internal + */ + maybeReopenStream(err) { + if (this.isActive && !this.isPermanentWatchError(err)) { + (0, logger_1.logger)('Watch.maybeReopenStream', this.requestTag, 'Stream ended, re-opening after retryable error:', err); + this.changeMap.clear(); + if (this.isResourceExhaustedError(err)) { + this.backoff.resetToMax(); + } + this.initStream(); + } + else { + this.closeStream(err); + } + } + /** + * Cancels the current idle timeout and reschedules a new timer. 
+ * + * @private + * @internal + */ + resetIdleTimeout() { + if (this.idleTimeoutHandle) { + clearTimeout(this.idleTimeoutHandle); + } + this.idleTimeoutHandle = (0, backoff_1.delayExecution)(() => { + var _a; + (0, logger_1.logger)('Watch.resetIdleTimeout', this.requestTag, 'Resetting stream after idle timeout'); + (_a = this.currentStream) === null || _a === void 0 ? void 0 : _a.end(); + this.currentStream = null; + const error = new google_gax_1.GoogleError('Watch stream idle timeout'); + error.code = google_gax_1.Status.UNKNOWN; + this.maybeReopenStream(error); + }, exports.mc); + } + /** + * Helper to restart the outgoing stream to the backend. + * @private + * @internal + */ + resetStream() { + (0, logger_1.logger)('Watch.resetStream', this.requestTag, 'Restarting stream'); + if (this.currentStream) { + this.currentStream.end(); + this.currentStream = null; + } + this.initStream(); + } + /** + * Initializes a new stream to the backend with backoff. + * @private + * @internal + */ + initStream() { + this.backoff + .backoffAndWait() + .then(async () => { + if (!this.isActive) { + (0, logger_1.logger)('Watch.initStream', this.requestTag, 'Not initializing inactive stream'); + return; + } + await this.firestore.initializeIfNeeded(this.requestTag); + const request = {}; + request.database = this.firestore.formattedName; + request.addTarget = this.getTarget(this.resumeToken); + // Note that we need to call the internal _listen API to pass additional + // header values in readWriteStream. + return this.firestore + .requestStream('listen', + /* bidirectional= */ true, request, this.requestTag) + .then(backendStream => { + if (!this.isActive) { + (0, logger_1.logger)('Watch.initStream', this.requestTag, 'Closing inactive stream'); + backendStream.emit('end'); + return; + } + (0, logger_1.logger)('Watch.initStream', this.requestTag, 'Opened new stream'); + this.currentStream = backendStream; + this.resetIdleTimeout(); + this.currentStream.on('data', (proto) => { + this.resetIdleTimeout(); + this.onData(proto); + }) + .on('error', err => { + if (this.currentStream === backendStream) { + this.currentStream = null; + this.maybeReopenStream(err); + } + }) + .on('end', () => { + if (this.currentStream === backendStream) { + this.currentStream = null; + const err = new google_gax_1.GoogleError('Stream ended unexpectedly'); + err.code = google_gax_1.Status.UNKNOWN; + this.maybeReopenStream(err); + } + }); + this.currentStream.resume(); + }); + }) + .catch(err => { + this.closeStream(err); + }); + } + /** + * Handles 'data' events and closes the stream if the response type is + * invalid. + * @private + * @internal + */ + onData(proto) { + if (proto.targetChange) { + (0, logger_1.logger)('Watch.onData', this.requestTag, 'Processing target change'); + const change = proto.targetChange; + const noTargetIds = !change.targetIds || change.targetIds.length === 0; + if (change.targetChangeType === 'NO_CHANGE') { + if (noTargetIds && change.readTime && this.current) { + // This means everything is up-to-date, so emit the current + // set of docs as a snapshot, if there were changes. 
+ this.pushSnapshot(timestamp_1.Timestamp.fromProto(change.readTime), change.resumeToken); + } + } + else if (change.targetChangeType === 'ADD') { + if (WATCH_TARGET_ID !== change.targetIds[0]) { + this.closeStream(Error('Unexpected target ID sent by server')); + } + } + else if (change.targetChangeType === 'REMOVE') { + let code = google_gax_1.Status.INTERNAL; + let message = 'internal error'; + if (change.cause) { + code = change.cause.code; + message = change.cause.message; + } + // @todo: Surface a .code property on the exception. + this.closeStream(new Error('Error ' + code + ': ' + message)); + } + else if (change.targetChangeType === 'RESET') { + // Whatever changes have happened so far no longer matter. + this.resetDocs(); + } + else if (change.targetChangeType === 'CURRENT') { + this.current = true; + } + else { + this.closeStream(new Error('Unknown target change type: ' + JSON.stringify(change))); + } + if (change.resumeToken && + this.affectsTarget(change.targetIds, WATCH_TARGET_ID)) { + this.backoff.reset(); + } + } + else if (proto.documentChange) { + (0, logger_1.logger)('Watch.onData', this.requestTag, 'Processing change event'); + // No other targetIds can show up here, but we still need to see + // if the targetId was in the added list or removed list. + const targetIds = proto.documentChange.targetIds || []; + const removedTargetIds = proto.documentChange.removedTargetIds || []; + let changed = false; + let removed = false; + for (let i = 0; i < targetIds.length; i++) { + if (targetIds[i] === WATCH_TARGET_ID) { + changed = true; + } + } + for (let i = 0; i < removedTargetIds.length; i++) { + if (removedTargetIds[i] === WATCH_TARGET_ID) { + removed = true; + } + } + const document = proto.documentChange.document; + const name = document.name; + const relativeName = path_1.QualifiedResourcePath.fromSlashSeparatedString(name).relativeName; + if (changed) { + (0, logger_1.logger)('Watch.onData', this.requestTag, 'Received document change'); + const ref = this.firestore.doc(relativeName); + const snapshot = new document_1.DocumentSnapshotBuilder(ref.withConverter(this._converter)); + snapshot.fieldsProto = document.fields || {}; + snapshot.createTime = timestamp_1.Timestamp.fromProto(document.createTime); + snapshot.updateTime = timestamp_1.Timestamp.fromProto(document.updateTime); + this.changeMap.set(relativeName, snapshot); + } + else if (removed) { + (0, logger_1.logger)('Watch.onData', this.requestTag, 'Received document remove'); + this.changeMap.set(relativeName, REMOVED); + } + } + else if (proto.documentDelete || proto.documentRemove) { + (0, logger_1.logger)('Watch.onData', this.requestTag, 'Processing remove event'); + const name = (proto.documentDelete || proto.documentRemove).document; + const relativeName = path_1.QualifiedResourcePath.fromSlashSeparatedString(name).relativeName; + this.changeMap.set(relativeName, REMOVED); + } + else if (proto.filter) { + (0, logger_1.logger)('Watch.onData', this.requestTag, 'Processing filter update'); + if (proto.filter.count !== this.currentSize()) { + // We need to remove all the current results. + this.resetDocs(); + // The filter didn't match, so re-issue the query. + this.resetStream(); + } + } + else { + this.closeStream(new Error('Unknown listen response type: ' + JSON.stringify(proto))); + } + } + /** + * Checks if the current target id is included in the list of target ids. + * If no targetIds are provided, returns true. 
+ * @private + * @internal + */ + affectsTarget(targetIds, currentId) { + if (targetIds === undefined || targetIds.length === 0) { + return true; + } + for (const targetId of targetIds) { + if (targetId === currentId) { + return true; + } + } + return false; + } + /** + * Assembles a new snapshot from the current set of changes and invokes the + * user's callback. Clears the current changes on completion. + * @private + * @internal + */ + pushSnapshot(readTime, nextResumeToken) { + const appliedChanges = this.computeSnapshot(readTime); + if (!this.hasPushed || appliedChanges.length > 0) { + (0, logger_1.logger)('Watch.pushSnapshot', this.requestTag, 'Sending snapshot with %d changes and %d documents', String(appliedChanges.length), this.docTree.length); + // We pass the current set of changes, even if `docTree` is modified later. + const currentTree = this.docTree; + this.onNext(readTime, currentTree.length, () => currentTree.keys, () => appliedChanges); + this.hasPushed = true; + } + this.changeMap.clear(); + this.resumeToken = nextResumeToken; + } + /** + * Applies a document delete to the document tree and the document map. + * Returns the corresponding DocumentChange event. + * @private + * @internal + */ + deleteDoc(name) { + assert(this.docMap.has(name), 'Document to delete does not exist'); + const oldDocument = this.docMap.get(name); + const existing = this.docTree.find(oldDocument); + const oldIndex = existing.index; + this.docTree = existing.remove(); + this.docMap.delete(name); + return new document_change_1.DocumentChange(ChangeType.removed, oldDocument, oldIndex, -1); + } + /** + * Applies a document add to the document tree and the document map. Returns + * the corresponding DocumentChange event. + * @private + * @internal + */ + addDoc(newDocument) { + const name = newDocument.ref.path; + assert(!this.docMap.has(name), 'Document to add already exists'); + this.docTree = this.docTree.insert(newDocument, null); + const newIndex = this.docTree.find(newDocument).index; + this.docMap.set(name, newDocument); + return new document_change_1.DocumentChange(ChangeType.added, newDocument, -1, newIndex); + } + /** + * Applies a document modification to the document tree and the document map. + * Returns the DocumentChange event for successful modifications. + * @private + * @internal + */ + modifyDoc(newDocument) { + const name = newDocument.ref.path; + assert(this.docMap.has(name), 'Document to modify does not exist'); + const oldDocument = this.docMap.get(name); + if (!oldDocument.updateTime.isEqual(newDocument.updateTime)) { + const removeChange = this.deleteDoc(name); + const addChange = this.addDoc(newDocument); + return new document_change_1.DocumentChange(ChangeType.modified, newDocument, removeChange.oldIndex, addChange.newIndex); + } + return null; + } + /** + * Applies the mutations in changeMap to both the document tree and the + * document lookup map. Modified docMap in-place and returns the updated + * state. + * @private + * @internal + */ + computeSnapshot(readTime) { + const changeSet = this.extractCurrentChanges(readTime); + const appliedChanges = []; + // Process the sorted changes in the order that is expected by our clients + // (removals, additions, and then modifications). We also need to sort the + // individual changes to assure that oldIndex/newIndex keep incrementing. + changeSet.deletes.sort((name1, name2) => { + // Deletes are sorted based on the order of the existing document. 
+ return this.getComparator()(this.docMap.get(name1), this.docMap.get(name2)); + }); + changeSet.deletes.forEach(name => { + const change = this.deleteDoc(name); + appliedChanges.push(change); + }); + changeSet.adds.sort(this.getComparator()); + changeSet.adds.forEach(snapshot => { + const change = this.addDoc(snapshot); + appliedChanges.push(change); + }); + changeSet.updates.sort(this.getComparator()); + changeSet.updates.forEach(snapshot => { + const change = this.modifyDoc(snapshot); + if (change) { + appliedChanges.push(change); + } + }); + assert(this.docTree.length === this.docMap.size, 'The update document ' + + 'tree and document map should have the same number of entries.'); + return appliedChanges; + } + /** + * Determines whether a watch error is considered permanent and should not be + * retried. Errors that don't provide a GRPC error code are always considered + * transient in this context. + * + * @private + * @internal + * @param error An error object. + * @return Whether the error is permanent. + */ + isPermanentWatchError(error) { + if (error.code === undefined) { + (0, logger_1.logger)('Watch.isPermanentError', this.requestTag, 'Unable to determine error code: ', error); + return false; + } + switch (error.code) { + case google_gax_1.Status.ABORTED: + case google_gax_1.Status.CANCELLED: + case google_gax_1.Status.UNKNOWN: + case google_gax_1.Status.DEADLINE_EXCEEDED: + case google_gax_1.Status.RESOURCE_EXHAUSTED: + case google_gax_1.Status.INTERNAL: + case google_gax_1.Status.UNAVAILABLE: + case google_gax_1.Status.UNAUTHENTICATED: + return false; + default: + return true; + } + } + /** + * Determines whether we need to initiate a longer backoff due to system + * overload. + * + * @private + * @internal + * @param error A GRPC Error object that exposes an error code. + * @return Whether we need to back off our retries. + */ + isResourceExhaustedError(error) { + return error.code === google_gax_1.Status.RESOURCE_EXHAUSTED; + } + /** Closes the stream and clears all timeouts. */ + shutdown() { + var _a; + if (this.isActive) { + this.isActive = false; + if (this.idleTimeoutHandle) { + clearTimeout(this.idleTimeoutHandle); + this.idleTimeoutHandle = undefined; + } + this.firestore.unregisterListener(); + } + (_a = this.currentStream) === null || _a === void 0 ? void 0 : _a.end(); + this.currentStream = null; + } +} +/** + * Creates a new Watch instance to listen on DocumentReferences. + * + * @private + * @internal + */ +class DocumentWatch extends Watch { + constructor(firestore, ref) { + super(firestore, ref._converter); + this.ref = ref; + } + getComparator() { + return DOCUMENT_WATCH_COMPARATOR; + } + getTarget(resumeToken) { + const formattedName = this.ref.formattedName; + return { + documents: { + documents: [formattedName], + }, + targetId: WATCH_TARGET_ID, + resumeToken, + }; + } +} +exports.i9 = DocumentWatch; +/** + * Creates a new Watch instance to listen on Queries. + * + * @private + * @internal + */ +class QueryWatch extends Watch { + constructor(firestore, query, converter) { + super(firestore, converter); + this.query = query; + this.comparator = query.comparator(); + } + getComparator() { + return this.query.comparator(); + } + getTarget(resumeToken) { + const query = this.query.toProto(); + return { query, targetId: WATCH_TARGET_ID, resumeToken }; + } +} +exports.mI = QueryWatch; +//# sourceMappingURL=watch.js.map + +/***/ }), + +/***/ 76012: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! 
+ * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.WriteBatch = exports.WriteResult = void 0; +exports.validateSetOptions = validateSetOptions; +exports.validateDocumentData = validateDocumentData; +exports.validateFieldValue = validateFieldValue; +const document_1 = __nccwpck_require__(98912); +const logger_1 = __nccwpck_require__(42718); +const path_1 = __nccwpck_require__(34908); +const helpers_1 = __nccwpck_require__(13823); +const serializer_1 = __nccwpck_require__(49170); +const timestamp_1 = __nccwpck_require__(29061); +const util_1 = __nccwpck_require__(15468); +const validate_1 = __nccwpck_require__(33822); +const trace_util_1 = __nccwpck_require__(2693); +/** + * A WriteResult wraps the write time set by the Firestore servers on sets(), + * updates(), and creates(). + * + * @class WriteResult + */ +class WriteResult { + /** + * @private + * + * @param _writeTime The time of the corresponding document write. + */ + constructor(_writeTime) { + this._writeTime = _writeTime; + } + /** + * The write time as set by the Firestore servers. + * + * @type {Timestamp} + * @name WriteResult#writeTime + * @readonly + * + * @example + * ``` + * let documentRef = firestore.doc('col/doc'); + * + * documentRef.set({foo: 'bar'}).then(writeResult => { + * console.log(`Document written at: ${writeResult.writeTime.toDate()}`); + * }); + * ``` + */ + get writeTime() { + return this._writeTime; + } + /** + * Returns true if this `WriteResult` is equal to the provided value. + * + * @param {*} other The value to compare against. + * @return true if this `WriteResult` is equal to the provided value. + */ + isEqual(other) { + return (this === other || + (other instanceof WriteResult && + this._writeTime.isEqual(other._writeTime))); + } +} +exports.WriteResult = WriteResult; +/** + * A Firestore WriteBatch that can be used to atomically commit multiple write + * operations at once. + * + * @class WriteBatch + */ +class WriteBatch { + /** + * The number of writes in this batch. + * @private + * @internal + */ + get _opCount() { + return this._ops.length; + } + /** @private */ + constructor(firestore) { + /** + * An array of document paths and the corresponding write operations that are + * executed as part of the commit. The resulting `api.IWrite` will be sent to + * the backend. + * + * @private + * @internal + */ + this._ops = []; + this._committed = false; + this._firestore = firestore; + this._serializer = new serializer_1.Serializer(firestore); + this._allowUndefined = !!firestore._settings.ignoreUndefinedProperties; + } + /** + * Checks if this write batch has any pending operations. + * + * @private + * @internal + */ + get isEmpty() { + return this._ops.length === 0; + } + /** + * Throws an error if this batch has already been committed. 
+ * + * @private + * @internal + */ + verifyNotCommitted() { + if (this._committed) { + throw new Error('Cannot modify a WriteBatch that has been committed.'); + } + } + /** + * Create a document with the provided object values. This will fail the batch + * if a document exists at its location. + * + * @param {DocumentReference} documentRef A reference to the document to be + * created. + * @param {T} data The object to serialize as the document. + * @throws {Error} If the provided input is not a valid Firestore document. + * @returns {WriteBatch} This WriteBatch instance. Used for chaining + * method calls. + * + * @example + * ``` + * let writeBatch = firestore.batch(); + * let documentRef = firestore.collection('col').doc(); + * + * writeBatch.create(documentRef, {foo: 'bar'}); + * + * writeBatch.commit().then(() => { + * console.log('Successfully executed batch.'); + * }); + * ``` + */ + create(documentRef, data) { + const ref = (0, helpers_1.validateDocumentReference)('documentRef', documentRef); + const firestoreData = ref._converter.toFirestore(data); + validateDocumentData('data', firestoreData, + /* allowDeletes= */ false, this._allowUndefined); + this.verifyNotCommitted(); + const transform = document_1.DocumentTransform.fromObject(ref, firestoreData); + transform.validate(); + const precondition = new document_1.Precondition({ exists: false }); + const op = () => { + const document = document_1.DocumentSnapshot.fromObject(ref, firestoreData); + const write = document.toWriteProto(); + if (!transform.isEmpty) { + write.updateTransforms = transform.toProto(this._serializer); + } + write.currentDocument = precondition.toProto(); + return write; + }; + this._ops.push({ docPath: documentRef.path, op }); + return this; + } + /** + * Deletes a document from the database. + * + * @param {DocumentReference} documentRef A reference to the document to be + * deleted. + * @param {Precondition=} precondition A precondition to enforce for this + * delete. + * @param {Timestamp=} precondition.lastUpdateTime If set, enforces that the + * document was last updated at lastUpdateTime. Fails the batch if the + * document doesn't exist or was last updated at a different time. + * @param {boolean= } precondition.exists If set to true, enforces that the target + * document must or must not exist. + * @returns {WriteBatch} This WriteBatch instance. Used for chaining + * method calls. + * + * @example + * ``` + * let writeBatch = firestore.batch(); + * let documentRef = firestore.doc('col/doc'); + * + * writeBatch.delete(documentRef); + * + * writeBatch.commit().then(() => { + * console.log('Successfully executed batch.'); + * }); + * ``` + */ + delete( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + documentRef, precondition) { + const ref = (0, helpers_1.validateDocumentReference)('documentRef', documentRef); + validateDeletePrecondition('precondition', precondition, { optional: true }); + this.verifyNotCommitted(); + const conditions = new document_1.Precondition(precondition); + const op = () => { + const write = { delete: ref.formattedName }; + if (!conditions.isEmpty) { + write.currentDocument = conditions.toProto(); + } + return write; + }; + this._ops.push({ docPath: documentRef.path, op }); + return this; + } + /** + * Write to the document referred to by the provided + * [DocumentReference]{@link DocumentReference}. If the document does not + * exist yet, it will be created. 
If you pass [SetOptions]{@link SetOptions}, + * the provided data can be merged into the existing document. + * + * @param {DocumentReference} documentRef A reference to the document to be + * set. + * @param {T|Partial} data The object to serialize as the document. + * @param {SetOptions=} options An object to configure the set behavior. + * @param {boolean=} options.merge - If true, set() merges the values + * specified in its data argument. Fields omitted from this set() call + * remain untouched. If your input sets any field to an empty map, all nested + * fields are overwritten. + * @param {Array.=} options.mergeFields - If provided, + * set() only replaces the specified field paths. Any field path that is no + * specified is ignored and remains untouched. If your input sets any field to + * an empty map, all nested fields are overwritten. + * @throws {Error} If the provided input is not a valid Firestore document. + * @returns {WriteBatch} This WriteBatch instance. Used for chaining + * method calls. + * + * @example + * ``` + * let writeBatch = firestore.batch(); + * let documentRef = firestore.doc('col/doc'); + * + * writeBatch.set(documentRef, {foo: 'bar'}); + * + * writeBatch.commit().then(() => { + * console.log('Successfully executed batch.'); + * }); + * ``` + */ + set(documentRef, data, options) { + validateSetOptions('options', options, { optional: true }); + const mergeLeaves = options && 'merge' in options && options.merge; + const mergePaths = options && 'mergeFields' in options; + const ref = (0, helpers_1.validateDocumentReference)('documentRef', documentRef); + let firestoreData; + if (mergeLeaves || mergePaths) { + firestoreData = ref._converter.toFirestore(data, options); + } + else { + firestoreData = ref._converter.toFirestore(data); + } + validateDocumentData('data', firestoreData, + /* allowDeletes= */ !!(mergePaths || mergeLeaves), this._allowUndefined); + this.verifyNotCommitted(); + let documentMask; + if (mergePaths) { + documentMask = document_1.DocumentMask.fromFieldMask(options.mergeFields); + firestoreData = documentMask.applyTo(firestoreData); + } + const transform = document_1.DocumentTransform.fromObject(ref, firestoreData); + transform.validate(); + const op = () => { + const document = document_1.DocumentSnapshot.fromObject(ref, firestoreData); + if (mergePaths) { + documentMask.removeFields(transform.fields); + } + else if (mergeLeaves) { + documentMask = document_1.DocumentMask.fromObject(firestoreData); + } + const write = document.toWriteProto(); + if (!transform.isEmpty) { + write.updateTransforms = transform.toProto(this._serializer); + } + if (mergePaths || mergeLeaves) { + write.updateMask = documentMask.toProto(); + } + return write; + }; + this._ops.push({ docPath: documentRef.path, op }); + return this; + } + /** + * Update fields of the document referred to by the provided + * [DocumentReference]{@link DocumentReference}. If the document + * doesn't yet exist, the update fails and the entire batch will be rejected. + * + * The update() method accepts either an object with field paths encoded as + * keys and field values encoded as values, or a variable number of arguments + * that alternate between field paths and field values. Nested fields can be + * updated by providing dot-separated field path strings or by providing + * FieldPath objects. + * + * A Precondition restricting this update can be specified as the last + * argument. + * + * @param {DocumentReference} documentRef A reference to the document to be + * updated. 
+ * @param {UpdateData|string|FieldPath} dataOrField An object + * containing the fields and values with which to update the document + * or the path of the first field to update. + * @param { + * ...(Precondition|*|string|FieldPath)} preconditionOrValues - + * An alternating list of field paths and values to update or a Precondition + * to restrict this update. + * @throws {Error} If the provided input is not valid Firestore data. + * @returns {WriteBatch} This WriteBatch instance. Used for chaining + * method calls. + * + * @example + * ``` + * let writeBatch = firestore.batch(); + * let documentRef = firestore.doc('col/doc'); + * + * writeBatch.update(documentRef, {foo: 'bar'}); + * + * writeBatch.commit().then(() => { + * console.log('Successfully executed batch.'); + * }); + * ``` + */ + update(documentRef, dataOrField, ...preconditionOrValues) { + // eslint-disable-next-line prefer-rest-params + (0, validate_1.validateMinNumberOfArguments)('WriteBatch.update', arguments, 2); + (0, helpers_1.validateDocumentReference)('documentRef', documentRef); + this.verifyNotCommitted(); + const updateMap = new Map(); + let precondition = new document_1.Precondition({ exists: true }); + const argumentError = 'Update() requires either a single JavaScript ' + + 'object or an alternating list of field/value pairs that can be ' + + 'followed by an optional precondition.'; + const usesVarargs = typeof dataOrField === 'string' || dataOrField instanceof path_1.FieldPath; + if (usesVarargs) { + const argumentOffset = 1; // Respect 'documentRef' in the error message + const fieldOrValues = [dataOrField, ...preconditionOrValues]; + try { + for (let i = 0; i < fieldOrValues.length; i += 2) { + if (i === fieldOrValues.length - 1) { + const maybePrecondition = fieldOrValues[i]; + validateUpdatePrecondition(i + argumentOffset, maybePrecondition); + precondition = new document_1.Precondition(maybePrecondition); + } + else { + const maybeFieldPath = fieldOrValues[i]; + (0, path_1.validateFieldPath)(i + argumentOffset, maybeFieldPath); + // Unlike the `validateMinNumberOfArguments` invocation above, this + // validation can be triggered both from `WriteBatch.update()` and + // `DocumentReference.update()`. Hence, we don't use the fully + // qualified API name in the error message. + (0, validate_1.validateMinNumberOfArguments)('update', fieldOrValues, i + 1); + const fieldPath = path_1.FieldPath.fromArgument(maybeFieldPath); + validateFieldValue(i + argumentOffset, fieldOrValues[i + 1], this._allowUndefined, fieldPath); + updateMap.set(fieldPath, fieldOrValues[i + 1]); + } + } + } + catch (err) { + (0, logger_1.logger)('WriteBatch.update', null, 'Varargs validation failed:', err); + // We catch the validation error here and re-throw to provide a better + // error message. 
+ throw new Error(`${argumentError} ${err.message}`); + } + } + else { + try { + validateUpdateMap('dataOrField', dataOrField, this._allowUndefined); + // eslint-disable-next-line prefer-rest-params + (0, validate_1.validateMaxNumberOfArguments)('update', arguments, 3); + Object.entries(dataOrField).forEach(([key, value]) => { + // Skip `undefined` values (can be hit if `ignoreUndefinedProperties` + // is set) + if (value !== undefined) { + (0, path_1.validateFieldPath)(key, key); + updateMap.set(path_1.FieldPath.fromArgument(key), value); + } + }); + if (preconditionOrValues.length > 0) { + validateUpdatePrecondition('preconditionOrValues', preconditionOrValues[0]); + precondition = new document_1.Precondition(preconditionOrValues[0]); + } + } + catch (err) { + (0, logger_1.logger)('WriteBatch.update', null, 'Non-varargs validation failed:', err); + // We catch the validation error here and prefix the error with a custom + // message to describe the usage of update() better. + throw new Error(`${argumentError} ${err.message}`); + } + } + validateNoConflictingFields('dataOrField', updateMap); + const transform = document_1.DocumentTransform.fromUpdateMap(documentRef, updateMap); + transform.validate(); + const documentMask = document_1.DocumentMask.fromUpdateMap(updateMap); + const op = () => { + const document = document_1.DocumentSnapshot.fromUpdateMap(documentRef, updateMap); + const write = document.toWriteProto(); + write.updateMask = documentMask.toProto(); + if (!transform.isEmpty) { + write.updateTransforms = transform.toProto(this._serializer); + } + write.currentDocument = precondition.toProto(); + return write; + }; + this._ops.push({ docPath: documentRef.path, op }); + return this; + } + /** + * Atomically commits all pending operations to the database and verifies all + * preconditions. Fails the entire write if any precondition is not met. + * + * @returns {Promise.>} A Promise that resolves + * when this batch completes. + * + * @example + * ``` + * let writeBatch = firestore.batch(); + * let documentRef = firestore.doc('col/doc'); + * + * writeBatch.set(documentRef, {foo: 'bar'}); + * + * writeBatch.commit().then(() => { + * console.log('Successfully executed batch.'); + * }); + * ``` + */ + commit() { + return this._firestore._traceUtil.startActiveSpan(trace_util_1.SPAN_NAME_BATCH_COMMIT, async () => { + // Capture the error stack to preserve stack tracing across async calls. + const stack = Error().stack; + // Commits should also be retried when they fail with status code ABORTED. + const retryCodes = [10 /* StatusCode.ABORTED */, ...(0, util_1.getRetryCodes)('commit')]; + return this._commit({ retryCodes }) + .then(response => { + return (response.writeResults || []).map(writeResult => new WriteResult(timestamp_1.Timestamp.fromProto(writeResult.updateTime || response.commitTime))); + }) + .catch(err => { + throw (0, util_1.wrapError)(err, stack); + }); + }, { + [trace_util_1.ATTRIBUTE_KEY_IS_TRANSACTIONAL]: false, + [trace_util_1.ATTRIBUTE_KEY_DOC_COUNT]: this._opCount, + }); + } + /** + * Commit method that takes an optional transaction ID. + * + * @private + * @internal + * @param commitOptions Options to use for this commit. + * @param commitOptions.transactionId The transaction ID of this commit. + * @param commitOptions.requestTag A unique client-assigned identifier for + * this request. + * @returns A Promise that resolves when this batch completes. + */ + async _commit(commitOptions) { + var _a; + // Note: We don't call `verifyNotCommitted()` to allow for retries. 
+ this._committed = true; + const tag = (_a = commitOptions === null || commitOptions === void 0 ? void 0 : commitOptions.requestTag) !== null && _a !== void 0 ? _a : (0, util_1.requestTag)(); + await this._firestore.initializeIfNeeded(tag); + // Note that the request may not always be of type ICommitRequest. This is + // just here to ensure type safety. + const request = { + database: this._firestore.formattedName, + writes: this._ops.map(op => op.op()), + }; + if (commitOptions === null || commitOptions === void 0 ? void 0 : commitOptions.transactionId) { + request.transaction = commitOptions.transactionId; + } + (0, logger_1.logger)('WriteBatch.commit', tag, 'Sending %d writes', request.writes.length); + return this._firestore.request((commitOptions === null || commitOptions === void 0 ? void 0 : commitOptions.methodName) || 'commit', request, tag, commitOptions === null || commitOptions === void 0 ? void 0 : commitOptions.retryCodes); + } + /** + * Resets the WriteBatch and dequeues all pending operations. + * @private + * @internal + */ + _reset() { + this._ops.splice(0); + this._committed = false; + } +} +exports.WriteBatch = WriteBatch; +/** + * Validates the use of 'value' as a Precondition and enforces that 'exists' + * and 'lastUpdateTime' use valid types. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The object to validate + * @param options Options describing other things for this function to validate. + */ +function validatePrecondition(arg, value, options) { + if (typeof value !== 'object' || value === null) { + throw new Error('Input is not an object.'); + } + const precondition = value; + let conditions = 0; + if (precondition.exists !== undefined) { + ++conditions; + if (typeof precondition.exists !== 'boolean') { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'precondition')} "exists" is not a boolean.'`); + } + if ((options === null || options === void 0 ? void 0 : options.allowedExistsValues) && + options.allowedExistsValues.indexOf(precondition.exists) < 0) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'precondition')} ` + + `"exists" is not allowed to have the value ${precondition.exists} ` + + `(allowed values: ${options.allowedExistsValues.join(', ')})`); + } + } + if (precondition.lastUpdateTime !== undefined) { + ++conditions; + if (!(precondition.lastUpdateTime instanceof timestamp_1.Timestamp)) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'precondition')} "lastUpdateTime" is not a Firestore Timestamp.`); + } + } + if (conditions > 1) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'precondition')} Input specifies more than one precondition.`); + } +} +/** + * Validates the use of 'value' as an update Precondition. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The object to validate. + * @param options Optional validation options specifying whether the value can + * be omitted. + */ +function validateUpdatePrecondition(arg, value, options) { + if (!(0, validate_1.validateOptional)(value, options)) { + validatePrecondition(arg, value, { allowedExistsValues: [true] }); + } +} +/** + * Validates the use of 'value' as a delete Precondition. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The object to validate. 
+ * @param options Optional validation options specifying whether the value can + * be omitted. + */ +function validateDeletePrecondition(arg, value, options) { + if (!(0, validate_1.validateOptional)(value, options)) { + validatePrecondition(arg, value); + } +} +/** + * Validates the use of 'value' as SetOptions and enforces that 'merge' is a + * boolean. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param value The object to validate. + * @param options Optional validation options specifying whether the value can + * be omitted. + * @throws if the input is not a valid SetOptions object. + */ +function validateSetOptions(arg, value, options) { + if (!(0, validate_1.validateOptional)(value, options)) { + if (!(0, util_1.isObject)(value)) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'set() options argument')} Input is not an object.`); + } + const setOptions = value; + if ('mergeFields' in setOptions) { + for (let i = 0; i < setOptions.mergeFields.length; ++i) { + try { + (0, path_1.validateFieldPath)(i, setOptions.mergeFields[i]); + } + catch (err) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'set() options argument')} "mergeFields" is not valid: ${err.message}`); + } + } + } + if ('merge' in setOptions && 'mergeFields' in setOptions) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'set() options argument')} You cannot specify both "merge" and "mergeFields".`); + } + } +} +/** + * Validates a JavaScript object for usage as a Firestore document. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param obj JavaScript object to validate. + * @param allowDeletes Whether to allow FieldValue.delete() sentinels. + * @param allowUndefined Whether to allow nested properties that are `undefined`. + * @throws when the object is invalid. + */ +function validateDocumentData(arg, obj, allowDeletes, allowUndefined) { + if (!(0, util_1.isPlainObject)(obj)) { + throw new Error((0, validate_1.customObjectMessage)(arg, obj)); + } + (0, serializer_1.validateUserInput)(arg, obj, 'Firestore document', { + allowDeletes: allowDeletes ? 'all' : 'none', + allowTransforms: true, + allowUndefined, + }); +} +/** + * Validates that a value can be used as field value during an update. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param val The value to verify. + * @param allowUndefined Whether to allow nested properties that are `undefined`. + * @param path The path to show in the error message. + */ +function validateFieldValue(arg, val, allowUndefined, path) { + (0, serializer_1.validateUserInput)(arg, val, 'Firestore value', { allowDeletes: 'root', allowTransforms: true, allowUndefined }, path); +} +/** + * Validates that the update data does not contain any ambiguous field + * definitions (such as 'a.b' and 'a'). + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param data An update map with field/value pairs. 
+ */ +function validateNoConflictingFields(arg, data) { + const fields = []; + data.forEach((value, key) => { + fields.push(key); + }); + fields.sort((left, right) => left.compareTo(right)); + for (let i = 1; i < fields.length; ++i) { + if (fields[i - 1].isPrefixOf(fields[i])) { + throw new Error(`${(0, validate_1.invalidArgumentMessage)(arg, 'update map')} Field "${fields[i - 1]}" was specified multiple times.`); + } + } +} +/** + * Validates that a JavaScript object is a map of field paths to field values. + * + * @private + * @internal + * @param arg The argument name or argument index (for varargs methods). + * @param obj JavaScript object to validate. + * @param allowUndefined Whether to allow nested properties that are `undefined`. + * @throws when the object is invalid. + */ +function validateUpdateMap(arg, obj, allowUndefined) { + if (!(0, util_1.isPlainObject)(obj)) { + throw new Error((0, validate_1.customObjectMessage)(arg, obj)); + } + if (Object.keys(obj).length === 0) { + throw new Error('At least one field must be updated.'); + } + validateFieldValue(arg, obj, allowUndefined); +} +//# sourceMappingURL=write-batch.js.map + +/***/ }), + +/***/ 46412: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = exports.paginator = exports.Paginator = void 0; +/*! + * @module common/paginator + */ +const arrify = __nccwpck_require__(61546); +const extend = __nccwpck_require__(38171); +const resource_stream_1 = __nccwpck_require__(72199); +Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); +/*! Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. 
+ */ + // tslint:disable-next-line:variable-name + extend(Class, methodNames) { + methodNames = arrify(methodNames); + methodNames.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + // map the original method to a private member + Class.prototype[methodName + '_'] = originalMethod; + // overwrite the original to auto-paginate + /* eslint-disable @typescript-eslint/no-explicit-any */ + Class.prototype[methodName] = function (...args) { + const parsedArguments = paginator.parseArguments_(args); + return paginator.run_(parsedArguments, originalMethod.bind(this)); + }; + }); + } + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + streamify(methodName) { + return function ( + /* eslint-disable @typescript-eslint/no-explicit-any */ + ...args) { + const parsedArguments = paginator.parseArguments_(args); + const originalMethod = this[methodName + '_'] || this[methodName]; + return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); + }; + } + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + parseArguments_(args) { + let query; + let autoPaginate = true; + let maxApiCalls = -1; + let maxResults = -1; + let callback; + const firstArgument = args[0]; + const lastArgument = args[args.length - 1]; + if (typeof firstArgument === 'function') { + callback = firstArgument; + } + else { + query = firstArgument; + } + if (typeof lastArgument === 'function') { + callback = lastArgument; + } + if (typeof query === 'object') { + query = extend(true, {}, query); + // Check if the user only asked for a certain amount of results. + if (query.maxResults && typeof query.maxResults === 'number') { + // `maxResults` is used API-wide. + maxResults = query.maxResults; + } + else if (typeof query.pageSize === 'number') { + // `pageSize` is Pub/Sub's `maxResults`. + maxResults = query.pageSize; + } + if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { + maxApiCalls = query.maxApiCalls; + delete query.maxApiCalls; + } + // maxResults is the user specified limit. + if (maxResults !== -1 || query.autoPaginate === false) { + autoPaginate = false; + } + } + const parsedArguments = { + query: query || {}, + autoPaginate, + maxApiCalls, + maxResults, + callback, + }; + parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); + delete parsedArguments.streamOptions.autoPaginate; + delete parsedArguments.streamOptions.maxResults; + delete parsedArguments.streamOptions.pageSize; + return parsedArguments; + } + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. 
+ * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments, originalMethod) { + const query = parsedArguments.query; + const callback = parsedArguments.callback; + if (!parsedArguments.autoPaginate) { + return originalMethod(query, callback); + } + const results = new Array(); + let otherArgs = []; + const promise = new Promise((resolve, reject) => { + const stream = paginator.runAsStream_(parsedArguments, originalMethod); + stream + .on('error', reject) + .on('data', (data) => results.push(data)) + .on('end', () => { + otherArgs = stream._otherArgs || []; + resolve(results); + }); + }); + if (!callback) { + return promise.then(results => [results, query, ...otherArgs]); + } + promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); + } + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + runAsStream_(parsedArguments, originalMethod) { + return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); + } +} +exports.Paginator = Paginator; +const paginator = new Paginator(); +exports.paginator = paginator; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 72199: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = void 0; +const stream_1 = __nccwpck_require__(12781); +class ResourceStream extends stream_1.Transform { + constructor(args, requestFn) { + const options = Object.assign({ objectMode: true }, args.streamOptions); + super(options); + this._ended = false; + this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; + this._nextQuery = args.query; + this._reading = false; + this._requestFn = requestFn; + this._requestsMade = 0; + this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; + this._otherArgs = []; + } + /* eslint-disable @typescript-eslint/no-explicit-any */ + end(...args) { + this._ended = true; + return super.end(...args); + } + _read() { + if (this._reading) { + return; + } + this._reading = true; + // Wrap in a try/catch to catch input linting errors, e.g. + // an invalid BigQuery query. These errors are thrown in an + // async fashion, which makes them un-catchable by the user. + try { + this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { + if (err) { + this.destroy(err); + return; + } + this._otherArgs = otherArgs; + this._nextQuery = nextQuery; + if (this._resultsToSend !== Infinity) { + results = results.splice(0, this._resultsToSend); + this._resultsToSend -= results.length; + } + let more = true; + for (const result of results) { + if (this._ended) { + break; + } + more = this.push(result); + } + const isFinished = !this._nextQuery || this._resultsToSend < 1; + const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; + if (isFinished || madeMaxCalls) { + this.end(); + } + if (more && !this._ended) { + setImmediate(() => this._read()); + } + this._reading = false; + }); + } + catch (e) { + this.destroy(e); + } + } +} +exports.ResourceStream = ResourceStream; +//# sourceMappingURL=resource-stream.js.map + +/***/ }), + +/***/ 3497: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; +const stream_1 = __nccwpck_require__(12781); +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. + * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. 
+ */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function replaceProjectIdToken(value, projectId) { + if (Array.isArray(value)) { + value = value.map(v => replaceProjectIdToken(v, projectId)); + } + if (value !== null && + typeof value === 'object' && + !(value instanceof Buffer) && + !(value instanceof stream_1.Stream) && + typeof value.hasOwnProperty === 'function') { + for (const opt in value) { + // eslint-disable-next-line no-prototype-builtins + if (value.hasOwnProperty(opt)) { + value[opt] = replaceProjectIdToken(value[opt], projectId); + } + } + } + if (typeof value === 'string' && + value.indexOf('{{projectId}}') > -1) { + if (!projectId || projectId === '{{projectId}}') { + throw new MissingProjectIdError(); + } + value = value.replace(/{{projectId}}/g, projectId); + } + return value; +} +exports.replaceProjectIdToken = replaceProjectIdToken; +/** + * Custom error type for missing project ID errors. + */ +class MissingProjectIdError extends Error { + constructor() { + super(...arguments); + this.message = `Sorry, we cannot connect to Cloud Services without a project + ID. You may specify one with an environment variable named + "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + } +} +exports.MissingProjectIdError = MissingProjectIdError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 19203: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* eslint-disable prefer-rest-params */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +function promisify(originalMethod, options) { + if (originalMethod.promisified_) { + return originalMethod; + } + options = options || {}; + const slice = Array.prototype.slice; + // tslint:disable-next-line:no-any + const wrapper = function () { + let last; + for (last = arguments.length - 1; last >= 0; last--) { + const arg = arguments[last]; + if (typeof arg === 'undefined') { + continue; // skip trailing undefined. + } + if (typeof arg !== 'function') { + break; // non-callback last argument found. + } + return originalMethod.apply(this, arguments); + } + // peel trailing undefined. + const args = slice.call(arguments, 0, last + 1); + // tslint:disable-next-line:variable-name + let PromiseCtor = Promise; + // Because dedupe will likely create a single install of + // @google-cloud/common to be shared amongst all modules, we need to + // localize it at the Service level. + if (this && this.Promise) { + PromiseCtor = this.Promise; + } + return new PromiseCtor((resolve, reject) => { + // tslint:disable-next-line:no-any + args.push((...args) => { + const callbackArgs = slice.call(args); + const err = callbackArgs.shift(); + if (err) { + return reject(err); + } + if (options.singular && callbackArgs.length === 1) { + resolve(callbackArgs[0]); + } + else { + resolve(callbackArgs); + } + }); + originalMethod.apply(this, args); + }); + }; + wrapper.promisified_ = true; + return wrapper; +} +exports.promisify = promisify; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. 
+ * @param {object=} options - Configuration object. + */ +// tslint:disable-next-line:variable-name +function promisifyAll(Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? + ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.promisified_) { + Class.prototype[methodName] = exports.promisify(originalMethod, options); + } + }); +} +exports.promisifyAll = promisifyAll; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +function callbackify(originalMethod) { + if (originalMethod.callbackified_) { + return originalMethod; + } + // tslint:disable-next-line:no-any + const wrapper = function () { + if (typeof arguments[arguments.length - 1] !== 'function') { + return originalMethod.apply(this, arguments); + } + const cb = Array.prototype.pop.call(arguments); + originalMethod.apply(this, arguments).then( + // tslint:disable-next-line:no-any + (res) => { + res = Array.isArray(res) ? res : [res]; + cb(null, ...res); + }, (err) => cb(err)); + }; + wrapper.callbackified_ = true; + return wrapper; +} +exports.callbackify = callbackify; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +function callbackifyAll( +// tslint:disable-next-line:variable-name +Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
+ ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.callbackified_) { + Class.prototype[methodName] = exports.callbackify(originalMethod); + } + }); +} +exports.callbackifyAll = callbackifyAll; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 16747: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const validator = __nccwpck_require__(4408); +const XMLParser = __nccwpck_require__(81013); +const XMLBuilder = __nccwpck_require__(46874); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 28689: +/***/ ((module) => { + +function getIgnoreAttributesFn(ignoreAttributes) { + if (typeof ignoreAttributes === 'function') { + return ignoreAttributes + } + if (Array.isArray(ignoreAttributes)) { + return (attrName) => { + for (const pattern of ignoreAttributes) { + if (typeof pattern === 'string' && attrName === pattern) { + return true + } + if (pattern instanceof RegExp && pattern.test(attrName)) { + return true + } + } + } + } + return () => false +} + +module.exports = getIgnoreAttributesFn + +/***/ }), + +/***/ 32939: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; + + +/***/ }), + +/***/ 4408: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(32939); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested 
function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... 
+ if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
[Generated action bundle (ncc output): the remainder of this hunk is machine-generated, vendored dependency code (fast-xml-parser validator, XML builder, and parser modules; uuid; @grpc/grpc-js) and is not reproduced here.]
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CallCredentials = void 0; +const metadata_1 = __nccwpck_require__(83665); +function isCurrentOauth2Client(client) { + return ('getRequestHeaders' in client && + typeof client.getRequestHeaders === 'function'); +} +/** + * A class that represents a generic method of adding authentication-related + * metadata on a per-request basis. + */ +class CallCredentials { + /** + * Creates a new CallCredentials object from a given function that generates + * Metadata objects. + * @param metadataGenerator A function that accepts a set of options, and + * generates a Metadata object based on these options, which is passed back + * to the caller via a supplied (err, metadata) callback. + */ + static createFromMetadataGenerator(metadataGenerator) { + return new SingleCallCredentials(metadataGenerator); + } + /** + * Create a gRPC credential from a Google credential object. + * @param googleCredentials The authentication client to use. + * @return The resulting CallCredentials object. + */ + static createFromGoogleCredential(googleCredentials) { + return CallCredentials.createFromMetadataGenerator((options, callback) => { + let getHeaders; + if (isCurrentOauth2Client(googleCredentials)) { + getHeaders = googleCredentials.getRequestHeaders(options.service_url); + } + else { + getHeaders = new Promise((resolve, reject) => { + googleCredentials.getRequestMetadata(options.service_url, (err, headers) => { + if (err) { + reject(err); + return; + } + if (!headers) { + reject(new Error('Headers not set by metadata plugin')); + return; + } + resolve(headers); + }); + }); + } + getHeaders.then(headers => { + const metadata = new metadata_1.Metadata(); + for (const key of Object.keys(headers)) { + metadata.add(key, headers[key]); + } + callback(null, metadata); + }, err => { + callback(err); + }); + }); + } + static createEmpty() { + return new EmptyCallCredentials(); + } +} +exports.CallCredentials = CallCredentials; +class ComposedCallCredentials extends CallCredentials { + constructor(creds) { + super(); + this.creds = creds; + } + async generateMetadata(options) { + const base = new metadata_1.Metadata(); + const generated = await Promise.all(this.creds.map(cred => cred.generateMetadata(options))); + for (const gen of generated) { + base.merge(gen); + } + return base; + } + compose(other) { + return new ComposedCallCredentials(this.creds.concat([other])); + } + _equals(other) { + if (this === other) { + return true; + } + if (other instanceof ComposedCallCredentials) { + return this.creds.every((value, index) => value._equals(other.creds[index])); + } + else { + return false; + } + } +} +class SingleCallCredentials extends CallCredentials { + constructor(metadataGenerator) { + super(); + this.metadataGenerator = metadataGenerator; + } + generateMetadata(options) { + return new Promise((resolve, reject) => { + this.metadataGenerator(options, (err, metadata) => { + if (metadata !== undefined) { + resolve(metadata); + } + else { + reject(err); + } + }); + }); + } + compose(other) { + return new ComposedCallCredentials([this, other]); + } + _equals(other) { + if (this === other) { + return true; + } + if (other instanceof SingleCallCredentials) { + return this.metadataGenerator === other.metadataGenerator; + } + else { + return false; + } + } +} +class EmptyCallCredentials extends CallCredentials { + generateMetadata(options) { + return Promise.resolve(new metadata_1.Metadata()); + } + compose(other) { + return other; + } + 
_equals(other) { + return other instanceof EmptyCallCredentials; + } +} +//# sourceMappingURL=call-credentials.js.map + +/***/ }), + +/***/ 78710: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.InterceptingListenerImpl = exports.isInterceptingListener = void 0; +function isInterceptingListener(listener) { + return (listener.onReceiveMetadata !== undefined && + listener.onReceiveMetadata.length === 1); +} +exports.isInterceptingListener = isInterceptingListener; +class InterceptingListenerImpl { + constructor(listener, nextListener) { + this.listener = listener; + this.nextListener = nextListener; + this.processingMetadata = false; + this.hasPendingMessage = false; + this.processingMessage = false; + this.pendingStatus = null; + } + processPendingMessage() { + if (this.hasPendingMessage) { + this.nextListener.onReceiveMessage(this.pendingMessage); + this.pendingMessage = null; + this.hasPendingMessage = false; + } + } + processPendingStatus() { + if (this.pendingStatus) { + this.nextListener.onReceiveStatus(this.pendingStatus); + } + } + onReceiveMetadata(metadata) { + this.processingMetadata = true; + this.listener.onReceiveMetadata(metadata, metadata => { + this.processingMetadata = false; + this.nextListener.onReceiveMetadata(metadata); + this.processPendingMessage(); + this.processPendingStatus(); + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + onReceiveMessage(message) { + /* If this listener processes messages asynchronously, the last message may + * be reordered with respect to the status */ + this.processingMessage = true; + this.listener.onReceiveMessage(message, msg => { + this.processingMessage = false; + if (this.processingMetadata) { + this.pendingMessage = msg; + this.hasPendingMessage = true; + } + else { + this.nextListener.onReceiveMessage(msg); + this.processPendingStatus(); + } + }); + } + onReceiveStatus(status) { + this.listener.onReceiveStatus(status, processedStatus => { + if (this.processingMetadata || this.processingMessage) { + this.pendingStatus = processedStatus; + } + else { + this.nextListener.onReceiveStatus(processedStatus); + } + }); + } +} +exports.InterceptingListenerImpl = InterceptingListenerImpl; +//# sourceMappingURL=call-interface.js.map + +/***/ }), + +/***/ 70380: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getNextCallNumber = void 0; +let nextCallNumber = 0; +function getNextCallNumber() { + return nextCallNumber++; +} +exports.getNextCallNumber = getNextCallNumber; +//# sourceMappingURL=call-number.js.map + +/***/ }), + +/***/ 97453: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ClientDuplexStreamImpl = exports.ClientWritableStreamImpl = exports.ClientReadableStreamImpl = exports.ClientUnaryCallImpl = exports.callErrorFromStatus = void 0; +const events_1 = __nccwpck_require__(82361); +const stream_1 = __nccwpck_require__(12781); +const constants_1 = __nccwpck_require__(90634); +/** + * Construct a ServiceError from a StatusObject. This function exists primarily + * as an attempt to make the error stack trace clearly communicate that the + * error is not necessarily a problem in gRPC itself. + * @param status + */ +function callErrorFromStatus(status, callerStack) { + const message = `${status.code} ${constants_1.Status[status.code]}: ${status.details}`; + const error = new Error(message); + const stack = `${error.stack}\nfor call at\n${callerStack}`; + return Object.assign(new Error(message), status, { stack }); +} +exports.callErrorFromStatus = callErrorFromStatus; +class ClientUnaryCallImpl extends events_1.EventEmitter { + constructor() { + super(); + } + cancel() { + var _a; + (_a = this.call) === null || _a === void 0 ? void 0 : _a.cancelWithStatus(constants_1.Status.CANCELLED, 'Cancelled on client'); + } + getPeer() { + var _a, _b; + return (_b = (_a = this.call) === null || _a === void 0 ? void 0 : _a.getPeer()) !== null && _b !== void 0 ? _b : 'unknown'; + } +} +exports.ClientUnaryCallImpl = ClientUnaryCallImpl; +class ClientReadableStreamImpl extends stream_1.Readable { + constructor(deserialize) { + super({ objectMode: true }); + this.deserialize = deserialize; + } + cancel() { + var _a; + (_a = this.call) === null || _a === void 0 ? void 0 : _a.cancelWithStatus(constants_1.Status.CANCELLED, 'Cancelled on client'); + } + getPeer() { + var _a, _b; + return (_b = (_a = this.call) === null || _a === void 0 ? void 0 : _a.getPeer()) !== null && _b !== void 0 ? _b : 'unknown'; + } + _read(_size) { + var _a; + (_a = this.call) === null || _a === void 0 ? void 0 : _a.startRead(); + } +} +exports.ClientReadableStreamImpl = ClientReadableStreamImpl; +class ClientWritableStreamImpl extends stream_1.Writable { + constructor(serialize) { + super({ objectMode: true }); + this.serialize = serialize; + } + cancel() { + var _a; + (_a = this.call) === null || _a === void 0 ? 
void 0 : _a.cancelWithStatus(constants_1.Status.CANCELLED, 'Cancelled on client'); + } + getPeer() { + var _a, _b; + return (_b = (_a = this.call) === null || _a === void 0 ? void 0 : _a.getPeer()) !== null && _b !== void 0 ? _b : 'unknown'; + } + _write(chunk, encoding, cb) { + var _a; + const context = { + callback: cb, + }; + const flags = Number(encoding); + if (!Number.isNaN(flags)) { + context.flags = flags; + } + (_a = this.call) === null || _a === void 0 ? void 0 : _a.sendMessageWithContext(context, chunk); + } + _final(cb) { + var _a; + (_a = this.call) === null || _a === void 0 ? void 0 : _a.halfClose(); + cb(); + } +} +exports.ClientWritableStreamImpl = ClientWritableStreamImpl; +class ClientDuplexStreamImpl extends stream_1.Duplex { + constructor(serialize, deserialize) { + super({ objectMode: true }); + this.serialize = serialize; + this.deserialize = deserialize; + } + cancel() { + var _a; + (_a = this.call) === null || _a === void 0 ? void 0 : _a.cancelWithStatus(constants_1.Status.CANCELLED, 'Cancelled on client'); + } + getPeer() { + var _a, _b; + return (_b = (_a = this.call) === null || _a === void 0 ? void 0 : _a.getPeer()) !== null && _b !== void 0 ? _b : 'unknown'; + } + _read(_size) { + var _a; + (_a = this.call) === null || _a === void 0 ? void 0 : _a.startRead(); + } + _write(chunk, encoding, cb) { + var _a; + const context = { + callback: cb, + }; + const flags = Number(encoding); + if (!Number.isNaN(flags)) { + context.flags = flags; + } + (_a = this.call) === null || _a === void 0 ? void 0 : _a.sendMessageWithContext(context, chunk); + } + _final(cb) { + var _a; + (_a = this.call) === null || _a === void 0 ? void 0 : _a.halfClose(); + cb(); + } +} +exports.ClientDuplexStreamImpl = ClientDuplexStreamImpl; +//# sourceMappingURL=call.js.map + +/***/ }), + +/***/ 25649: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2024 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FileWatcherCertificateProvider = void 0; +const fs = __nccwpck_require__(57147); +const logging = __nccwpck_require__(35993); +const constants_1 = __nccwpck_require__(90634); +const util_1 = __nccwpck_require__(73837); +const TRACER_NAME = 'certificate_provider'; +function trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, text); +} +const readFilePromise = (0, util_1.promisify)(fs.readFile); +class FileWatcherCertificateProvider { + constructor(config) { + this.config = config; + this.refreshTimer = null; + this.fileResultPromise = null; + this.latestCaUpdate = null; + this.caListeners = new Set(); + this.latestIdentityUpdate = null; + this.identityListeners = new Set(); + this.lastUpdateTime = null; + if ((config.certificateFile === undefined) !== (config.privateKeyFile === undefined)) { + throw new Error('certificateFile and privateKeyFile must be set or unset together'); + } + if (config.certificateFile === undefined && config.caCertificateFile === undefined) { + throw new Error('At least one of certificateFile and caCertificateFile must be set'); + } + trace('File watcher constructed with config ' + JSON.stringify(config)); + } + updateCertificates() { + if (this.fileResultPromise) { + return; + } + this.fileResultPromise = Promise.allSettled([ + this.config.certificateFile ? readFilePromise(this.config.certificateFile) : Promise.reject(), + this.config.privateKeyFile ? readFilePromise(this.config.privateKeyFile) : Promise.reject(), + this.config.caCertificateFile ? readFilePromise(this.config.caCertificateFile) : Promise.reject() + ]); + this.fileResultPromise.then(([certificateResult, privateKeyResult, caCertificateResult]) => { + if (!this.refreshTimer) { + return; + } + trace('File watcher read certificates certificate' + (certificateResult ? '!=' : '==') + 'null, privateKey' + (privateKeyResult ? '!=' : '==') + 'null, CA certificate' + (caCertificateResult ? '!=' : '==') + 'null'); + this.lastUpdateTime = new Date(); + this.fileResultPromise = null; + if (certificateResult.status === 'fulfilled' && privateKeyResult.status === 'fulfilled') { + this.latestIdentityUpdate = { + certificate: certificateResult.value, + privateKey: privateKeyResult.value + }; + } + else { + this.latestIdentityUpdate = null; + } + if (caCertificateResult.status === 'fulfilled') { + this.latestCaUpdate = { + caCertificate: caCertificateResult.value + }; + } + for (const listener of this.identityListeners) { + listener(this.latestIdentityUpdate); + } + for (const listener of this.caListeners) { + listener(this.latestCaUpdate); + } + }); + trace('File watcher initiated certificate update'); + } + maybeStartWatchingFiles() { + if (!this.refreshTimer) { + /* Perform the first read immediately, but only if there was not already + * a recent read, to avoid reading from the filesystem significantly more + * frequently than configured if the provider quickly switches between + * used and unused. */ + const timeSinceLastUpdate = this.lastUpdateTime ? 
(new Date()).getTime() - this.lastUpdateTime.getTime() : Infinity; + if (timeSinceLastUpdate > this.config.refreshIntervalMs) { + this.updateCertificates(); + } + if (timeSinceLastUpdate > this.config.refreshIntervalMs * 2) { + // Clear out old updates if they are definitely stale + this.latestCaUpdate = null; + this.latestIdentityUpdate = null; + } + this.refreshTimer = setInterval(() => this.updateCertificates(), this.config.refreshIntervalMs); + trace('File watcher started watching'); + } + } + maybeStopWatchingFiles() { + if (this.caListeners.size === 0 && this.identityListeners.size === 0) { + this.fileResultPromise = null; + if (this.refreshTimer) { + clearInterval(this.refreshTimer); + this.refreshTimer = null; + } + } + } + addCaCertificateListener(listener) { + this.caListeners.add(listener); + this.maybeStartWatchingFiles(); + process.nextTick(listener, this.latestCaUpdate); + } + removeCaCertificateListener(listener) { + this.caListeners.delete(listener); + this.maybeStopWatchingFiles(); + } + addIdentityCertificateListener(listener) { + this.identityListeners.add(listener); + this.maybeStartWatchingFiles(); + process.nextTick(listener, this.latestIdentityUpdate); + } + removeIdentityCertificateListener(listener) { + this.identityListeners.delete(listener); + this.maybeStopWatchingFiles(); + } +} +exports.FileWatcherCertificateProvider = FileWatcherCertificateProvider; +//# sourceMappingURL=certificate-provider.js.map + +/***/ }), + +/***/ 44030: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createCertificateProviderChannelCredentials = exports.ChannelCredentials = void 0; +const tls_1 = __nccwpck_require__(24404); +const call_credentials_1 = __nccwpck_require__(21426); +const tls_helpers_1 = __nccwpck_require__(86581); +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function verifyIsBufferOrNull(obj, friendlyName) { + if (obj && !(obj instanceof Buffer)) { + throw new TypeError(`${friendlyName}, if provided, must be a Buffer.`); + } +} +/** + * A class that contains credentials for communicating over a channel, as well + * as a set of per-call credentials, which are applied to every method call made + * over a channel initialized with an instance of this class. + */ +class ChannelCredentials { + constructor(callCredentials) { + this.callCredentials = callCredentials || call_credentials_1.CallCredentials.createEmpty(); + } + /** + * Gets the set of per-call credentials associated with this instance. + */ + _getCallCredentials() { + return this.callCredentials; + } + _ref() { + // Do nothing by default + } + _unref() { + // Do nothing by default + } + /** + * Return a new ChannelCredentials instance with a given set of credentials. + * The resulting instance can be used to construct a Channel that communicates + * over TLS. 
+ * @param rootCerts The root certificate data. + * @param privateKey The client certificate private key, if available. + * @param certChain The client certificate key chain, if available. + * @param verifyOptions Additional options to modify certificate verification + */ + static createSsl(rootCerts, privateKey, certChain, verifyOptions) { + var _a; + verifyIsBufferOrNull(rootCerts, 'Root certificate'); + verifyIsBufferOrNull(privateKey, 'Private key'); + verifyIsBufferOrNull(certChain, 'Certificate chain'); + if (privateKey && !certChain) { + throw new Error('Private key must be given with accompanying certificate chain'); + } + if (!privateKey && certChain) { + throw new Error('Certificate chain must be given with accompanying private key'); + } + const secureContext = (0, tls_1.createSecureContext)({ + ca: (_a = rootCerts !== null && rootCerts !== void 0 ? rootCerts : (0, tls_helpers_1.getDefaultRootsData)()) !== null && _a !== void 0 ? _a : undefined, + key: privateKey !== null && privateKey !== void 0 ? privateKey : undefined, + cert: certChain !== null && certChain !== void 0 ? certChain : undefined, + ciphers: tls_helpers_1.CIPHER_SUITES, + }); + return new SecureChannelCredentialsImpl(secureContext, verifyOptions !== null && verifyOptions !== void 0 ? verifyOptions : {}); + } + /** + * Return a new ChannelCredentials instance with credentials created using + * the provided secureContext. The resulting instances can be used to + * construct a Channel that communicates over TLS. gRPC will not override + * anything in the provided secureContext, so the environment variables + * GRPC_SSL_CIPHER_SUITES and GRPC_DEFAULT_SSL_ROOTS_FILE_PATH will + * not be applied. + * @param secureContext The return value of tls.createSecureContext() + * @param verifyOptions Additional options to modify certificate verification + */ + static createFromSecureContext(secureContext, verifyOptions) { + return new SecureChannelCredentialsImpl(secureContext, verifyOptions !== null && verifyOptions !== void 0 ? verifyOptions : {}); + } + /** + * Return a new ChannelCredentials instance with no credentials. + */ + static createInsecure() { + return new InsecureChannelCredentialsImpl(); + } +} +exports.ChannelCredentials = ChannelCredentials; +class InsecureChannelCredentialsImpl extends ChannelCredentials { + constructor() { + super(); + } + compose(callCredentials) { + throw new Error('Cannot compose insecure credentials'); + } + _getConnectionOptions() { + return {}; + } + _isSecure() { + return false; + } + _equals(other) { + return other instanceof InsecureChannelCredentialsImpl; + } +} +class SecureChannelCredentialsImpl extends ChannelCredentials { + constructor(secureContext, verifyOptions) { + super(); + this.secureContext = secureContext; + this.verifyOptions = verifyOptions; + this.connectionOptions = { + secureContext, + }; + // Node asserts that this option is a function, so we cannot pass undefined + if (verifyOptions === null || verifyOptions === void 0 ? void 0 : verifyOptions.checkServerIdentity) { + this.connectionOptions.checkServerIdentity = + verifyOptions.checkServerIdentity; + } + if ((verifyOptions === null || verifyOptions === void 0 ? 
void 0 : verifyOptions.rejectUnauthorized) !== undefined) { + this.connectionOptions.rejectUnauthorized = + verifyOptions.rejectUnauthorized; + } + } + compose(callCredentials) { + const combinedCallCredentials = this.callCredentials.compose(callCredentials); + return new ComposedChannelCredentialsImpl(this, combinedCallCredentials); + } + _getConnectionOptions() { + // Copy to prevent callers from mutating this.connectionOptions + return Object.assign({}, this.connectionOptions); + } + _isSecure() { + return true; + } + _equals(other) { + if (this === other) { + return true; + } + if (other instanceof SecureChannelCredentialsImpl) { + return (this.secureContext === other.secureContext && + this.verifyOptions.checkServerIdentity === + other.verifyOptions.checkServerIdentity); + } + else { + return false; + } + } +} +class CertificateProviderChannelCredentialsImpl extends ChannelCredentials { + constructor(caCertificateProvider, identityCertificateProvider, verifyOptions) { + super(); + this.caCertificateProvider = caCertificateProvider; + this.identityCertificateProvider = identityCertificateProvider; + this.verifyOptions = verifyOptions; + this.refcount = 0; + this.latestCaUpdate = null; + this.latestIdentityUpdate = null; + this.caCertificateUpdateListener = this.handleCaCertificateUpdate.bind(this); + this.identityCertificateUpdateListener = this.handleIdentityCertitificateUpdate.bind(this); + } + compose(callCredentials) { + const combinedCallCredentials = this.callCredentials.compose(callCredentials); + return new ComposedChannelCredentialsImpl(this, combinedCallCredentials); + } + _getConnectionOptions() { + var _a, _b, _c; + if (this.latestCaUpdate === null) { + return null; + } + if (this.identityCertificateProvider !== null && this.latestIdentityUpdate === null) { + return null; + } + const secureContext = (0, tls_1.createSecureContext)({ + ca: this.latestCaUpdate.caCertificate, + key: (_a = this.latestIdentityUpdate) === null || _a === void 0 ? void 0 : _a.privateKey, + cert: (_b = this.latestIdentityUpdate) === null || _b === void 0 ? void 0 : _b.certificate, + ciphers: tls_helpers_1.CIPHER_SUITES + }); + const options = { + secureContext: secureContext + }; + if ((_c = this.verifyOptions) === null || _c === void 0 ? void 0 : _c.checkServerIdentity) { + options.checkServerIdentity = this.verifyOptions.checkServerIdentity; + } + return options; + } + _isSecure() { + return true; + } + _equals(other) { + var _a, _b; + if (this === other) { + return true; + } + if (other instanceof CertificateProviderChannelCredentialsImpl) { + return this.caCertificateProvider === other.caCertificateProvider && + this.identityCertificateProvider === other.identityCertificateProvider && + ((_a = this.verifyOptions) === null || _a === void 0 ? void 0 : _a.checkServerIdentity) === ((_b = other.verifyOptions) === null || _b === void 0 ? void 0 : _b.checkServerIdentity); + } + else { + return false; + } + } + _ref() { + var _a; + if (this.refcount === 0) { + this.caCertificateProvider.addCaCertificateListener(this.caCertificateUpdateListener); + (_a = this.identityCertificateProvider) === null || _a === void 0 ? void 0 : _a.addIdentityCertificateListener(this.identityCertificateUpdateListener); + } + this.refcount += 1; + } + _unref() { + var _a; + this.refcount -= 1; + if (this.refcount === 0) { + this.caCertificateProvider.removeCaCertificateListener(this.caCertificateUpdateListener); + (_a = this.identityCertificateProvider) === null || _a === void 0 ? 
void 0 : _a.removeIdentityCertificateListener(this.identityCertificateUpdateListener); + } + } + handleCaCertificateUpdate(update) { + this.latestCaUpdate = update; + } + handleIdentityCertitificateUpdate(update) { + this.latestIdentityUpdate = update; + } +} +function createCertificateProviderChannelCredentials(caCertificateProvider, identityCertificateProvider, verifyOptions) { + return new CertificateProviderChannelCredentialsImpl(caCertificateProvider, identityCertificateProvider, verifyOptions !== null && verifyOptions !== void 0 ? verifyOptions : null); +} +exports.createCertificateProviderChannelCredentials = createCertificateProviderChannelCredentials; +class ComposedChannelCredentialsImpl extends ChannelCredentials { + constructor(channelCredentials, callCreds) { + super(callCreds); + this.channelCredentials = channelCredentials; + if (!channelCredentials._isSecure()) { + throw new Error('Cannot compose insecure credentials'); + } + } + compose(callCredentials) { + const combinedCallCredentials = this.callCredentials.compose(callCredentials); + return new ComposedChannelCredentialsImpl(this.channelCredentials, combinedCallCredentials); + } + _getConnectionOptions() { + return this.channelCredentials._getConnectionOptions(); + } + _isSecure() { + return true; + } + _equals(other) { + if (this === other) { + return true; + } + if (other instanceof ComposedChannelCredentialsImpl) { + return (this.channelCredentials._equals(other.channelCredentials) && + this.callCredentials._equals(other.callCredentials)); + } + else { + return false; + } + } +} +//# sourceMappingURL=channel-credentials.js.map + +/***/ }), + +/***/ 99810: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.channelOptionsEqual = exports.recognizedOptions = void 0; +/** + * This is for checking provided options at runtime. This is an object for + * easier membership checking. 
+ */ +exports.recognizedOptions = { + 'grpc.ssl_target_name_override': true, + 'grpc.primary_user_agent': true, + 'grpc.secondary_user_agent': true, + 'grpc.default_authority': true, + 'grpc.keepalive_time_ms': true, + 'grpc.keepalive_timeout_ms': true, + 'grpc.keepalive_permit_without_calls': true, + 'grpc.service_config': true, + 'grpc.max_concurrent_streams': true, + 'grpc.initial_reconnect_backoff_ms': true, + 'grpc.max_reconnect_backoff_ms': true, + 'grpc.use_local_subchannel_pool': true, + 'grpc.max_send_message_length': true, + 'grpc.max_receive_message_length': true, + 'grpc.enable_http_proxy': true, + 'grpc.enable_channelz': true, + 'grpc.dns_min_time_between_resolutions_ms': true, + 'grpc.enable_retries': true, + 'grpc.per_rpc_retry_buffer_size': true, + 'grpc.retry_buffer_size': true, + 'grpc.max_connection_age_ms': true, + 'grpc.max_connection_age_grace_ms': true, + 'grpc-node.max_session_memory': true, + 'grpc.service_config_disable_resolution': true, + 'grpc.client_idle_timeout_ms': true, + 'grpc-node.tls_enable_trace': true, + 'grpc.lb.ring_hash.ring_size_cap': true, + 'grpc-node.retry_max_attempts_limit': true, +}; +function channelOptionsEqual(options1, options2) { + const keys1 = Object.keys(options1).sort(); + const keys2 = Object.keys(options2).sort(); + if (keys1.length !== keys2.length) { + return false; + } + for (let i = 0; i < keys1.length; i += 1) { + if (keys1[i] !== keys2[i]) { + return false; + } + if (options1[keys1[i]] !== options2[keys2[i]]) { + return false; + } + } + return true; +} +exports.channelOptionsEqual = channelOptionsEqual; +//# sourceMappingURL=channel-options.js.map + +/***/ }), + +/***/ 13860: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ChannelImplementation = void 0; +const channel_credentials_1 = __nccwpck_require__(44030); +const internal_channel_1 = __nccwpck_require__(69672); +class ChannelImplementation { + constructor(target, credentials, options) { + if (typeof target !== 'string') { + throw new TypeError('Channel target must be a string'); + } + if (!(credentials instanceof channel_credentials_1.ChannelCredentials)) { + throw new TypeError('Channel credentials must be a ChannelCredentials object'); + } + if (options) { + if (typeof options !== 'object') { + throw new TypeError('Channel options must be an object'); + } + } + this.internalChannel = new internal_channel_1.InternalChannel(target, credentials, options); + } + close() { + this.internalChannel.close(); + } + getTarget() { + return this.internalChannel.getTarget(); + } + getConnectivityState(tryToConnect) { + return this.internalChannel.getConnectivityState(tryToConnect); + } + watchConnectivityState(currentState, deadline, callback) { + this.internalChannel.watchConnectivityState(currentState, deadline, callback); + } + /** + * Get the channelz reference object for this channel. The returned value is + * garbage if channelz is disabled for this channel. + * @returns + */ + getChannelzRef() { + return this.internalChannel.getChannelzRef(); + } + createCall(method, deadline, host, parentCall, propagateFlags) { + if (typeof method !== 'string') { + throw new TypeError('Channel#createCall: method must be a string'); + } + if (!(typeof deadline === 'number' || deadline instanceof Date)) { + throw new TypeError('Channel#createCall: deadline must be a number or Date'); + } + return this.internalChannel.createCall(method, deadline, host, parentCall, propagateFlags); + } +} +exports.ChannelImplementation = ChannelImplementation; +//# sourceMappingURL=channel.js.map + +/***/ }), + +/***/ 79975: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2021 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setup = exports.getChannelzServiceDefinition = exports.getChannelzHandlers = exports.unregisterChannelzRef = exports.registerChannelzSocket = exports.registerChannelzServer = exports.registerChannelzSubchannel = exports.registerChannelzChannel = exports.ChannelzCallTrackerStub = exports.ChannelzCallTracker = exports.ChannelzChildrenTrackerStub = exports.ChannelzChildrenTracker = exports.ChannelzTrace = exports.ChannelzTraceStub = void 0; +const net_1 = __nccwpck_require__(41808); +const ordered_map_1 = __nccwpck_require__(12592); +const connectivity_state_1 = __nccwpck_require__(80878); +const constants_1 = __nccwpck_require__(90634); +const subchannel_address_1 = __nccwpck_require__(78021); +const admin_1 = __nccwpck_require__(8258); +const make_client_1 = __nccwpck_require__(38541); +function channelRefToMessage(ref) { + return { + channel_id: ref.id, + name: ref.name, + }; +} +function subchannelRefToMessage(ref) { + return { + subchannel_id: ref.id, + name: ref.name, + }; +} +function serverRefToMessage(ref) { + return { + server_id: ref.id, + }; +} +function socketRefToMessage(ref) { + return { + socket_id: ref.id, + name: ref.name, + }; +} +/** + * The loose upper bound on the number of events that should be retained in a + * trace. This may be exceeded by up to a factor of 2. Arbitrarily chosen as a + * number that should be large enough to contain the recent relevant + * information, but small enough to not use excessive memory. + */ +const TARGET_RETAINED_TRACES = 32; +/** + * Default number of sockets/servers/channels/subchannels to return + */ +const DEFAULT_MAX_RESULTS = 100; +class ChannelzTraceStub { + constructor() { + this.events = []; + this.creationTimestamp = new Date(); + this.eventsLogged = 0; + } + addTrace() { } + getTraceMessage() { + return { + creation_timestamp: dateToProtoTimestamp(this.creationTimestamp), + num_events_logged: this.eventsLogged, + events: [], + }; + } +} +exports.ChannelzTraceStub = ChannelzTraceStub; +class ChannelzTrace { + constructor() { + this.events = []; + this.eventsLogged = 0; + this.creationTimestamp = new Date(); + } + addTrace(severity, description, child) { + const timestamp = new Date(); + this.events.push({ + description: description, + severity: severity, + timestamp: timestamp, + childChannel: (child === null || child === void 0 ? void 0 : child.kind) === 'channel' ? child : undefined, + childSubchannel: (child === null || child === void 0 ? void 0 : child.kind) === 'subchannel' ? child : undefined, + }); + // Whenever the trace array gets too large, discard the first half + if (this.events.length >= TARGET_RETAINED_TRACES * 2) { + this.events = this.events.slice(TARGET_RETAINED_TRACES); + } + this.eventsLogged += 1; + } + getTraceMessage() { + return { + creation_timestamp: dateToProtoTimestamp(this.creationTimestamp), + num_events_logged: this.eventsLogged, + events: this.events.map(event => { + return { + description: event.description, + severity: event.severity, + timestamp: dateToProtoTimestamp(event.timestamp), + channel_ref: event.childChannel + ? channelRefToMessage(event.childChannel) + : null, + subchannel_ref: event.childSubchannel + ? 
subchannelRefToMessage(event.childSubchannel) + : null, + }; + }), + }; + } +} +exports.ChannelzTrace = ChannelzTrace; +class ChannelzChildrenTracker { + constructor() { + this.channelChildren = new ordered_map_1.OrderedMap(); + this.subchannelChildren = new ordered_map_1.OrderedMap(); + this.socketChildren = new ordered_map_1.OrderedMap(); + this.trackerMap = { + ["channel" /* EntityTypes.channel */]: this.channelChildren, + ["subchannel" /* EntityTypes.subchannel */]: this.subchannelChildren, + ["socket" /* EntityTypes.socket */]: this.socketChildren, + }; + } + refChild(child) { + const tracker = this.trackerMap[child.kind]; + const trackedChild = tracker.find(child.id); + if (trackedChild.equals(tracker.end())) { + tracker.setElement(child.id, { + ref: child, + count: 1, + }, trackedChild); + } + else { + trackedChild.pointer[1].count += 1; + } + } + unrefChild(child) { + const tracker = this.trackerMap[child.kind]; + const trackedChild = tracker.getElementByKey(child.id); + if (trackedChild !== undefined) { + trackedChild.count -= 1; + if (trackedChild.count === 0) { + tracker.eraseElementByKey(child.id); + } + } + } + getChildLists() { + return { + channels: this.channelChildren, + subchannels: this.subchannelChildren, + sockets: this.socketChildren, + }; + } +} +exports.ChannelzChildrenTracker = ChannelzChildrenTracker; +class ChannelzChildrenTrackerStub extends ChannelzChildrenTracker { + refChild() { } + unrefChild() { } +} +exports.ChannelzChildrenTrackerStub = ChannelzChildrenTrackerStub; +class ChannelzCallTracker { + constructor() { + this.callsStarted = 0; + this.callsSucceeded = 0; + this.callsFailed = 0; + this.lastCallStartedTimestamp = null; + } + addCallStarted() { + this.callsStarted += 1; + this.lastCallStartedTimestamp = new Date(); + } + addCallSucceeded() { + this.callsSucceeded += 1; + } + addCallFailed() { + this.callsFailed += 1; + } +} +exports.ChannelzCallTracker = ChannelzCallTracker; +class ChannelzCallTrackerStub extends ChannelzCallTracker { + addCallStarted() { } + addCallSucceeded() { } + addCallFailed() { } +} +exports.ChannelzCallTrackerStub = ChannelzCallTrackerStub; +const entityMaps = { + ["channel" /* EntityTypes.channel */]: new ordered_map_1.OrderedMap(), + ["subchannel" /* EntityTypes.subchannel */]: new ordered_map_1.OrderedMap(), + ["server" /* EntityTypes.server */]: new ordered_map_1.OrderedMap(), + ["socket" /* EntityTypes.socket */]: new ordered_map_1.OrderedMap(), +}; +const generateRegisterFn = (kind) => { + let nextId = 1; + function getNextId() { + return nextId++; + } + const entityMap = entityMaps[kind]; + return (name, getInfo, channelzEnabled) => { + const id = getNextId(); + const ref = { id, name, kind }; + if (channelzEnabled) { + entityMap.setElement(id, { ref, getInfo }); + } + return ref; + }; +}; +exports.registerChannelzChannel = generateRegisterFn("channel" /* EntityTypes.channel */); +exports.registerChannelzSubchannel = generateRegisterFn("subchannel" /* EntityTypes.subchannel */); +exports.registerChannelzServer = generateRegisterFn("server" /* EntityTypes.server */); +exports.registerChannelzSocket = generateRegisterFn("socket" /* EntityTypes.socket */); +function unregisterChannelzRef(ref) { + entityMaps[ref.kind].eraseElementByKey(ref.id); +} +exports.unregisterChannelzRef = unregisterChannelzRef; +/** + * Parse a single section of an IPv6 address as two bytes + * @param addressSection A hexadecimal string of length up to 4 + * @returns The pair of bytes representing this address section + */ +function 
parseIPv6Section(addressSection) { + const numberValue = Number.parseInt(addressSection, 16); + return [(numberValue / 256) | 0, numberValue % 256]; +} +/** + * Parse a chunk of an IPv6 address string to some number of bytes + * @param addressChunk Some number of segments of up to 4 hexadecimal + * characters each, joined by colons. + * @returns The list of bytes representing this address chunk + */ +function parseIPv6Chunk(addressChunk) { + if (addressChunk === '') { + return []; + } + const bytePairs = addressChunk + .split(':') + .map(section => parseIPv6Section(section)); + const result = []; + return result.concat(...bytePairs); +} +/** + * Converts an IPv4 or IPv6 address from string representation to binary + * representation + * @param ipAddress an IP address in standard IPv4 or IPv6 text format + * @returns + */ +function ipAddressStringToBuffer(ipAddress) { + if ((0, net_1.isIPv4)(ipAddress)) { + return Buffer.from(Uint8Array.from(ipAddress.split('.').map(segment => Number.parseInt(segment)))); + } + else if ((0, net_1.isIPv6)(ipAddress)) { + let leftSection; + let rightSection; + const doubleColonIndex = ipAddress.indexOf('::'); + if (doubleColonIndex === -1) { + leftSection = ipAddress; + rightSection = ''; + } + else { + leftSection = ipAddress.substring(0, doubleColonIndex); + rightSection = ipAddress.substring(doubleColonIndex + 2); + } + const leftBuffer = Buffer.from(parseIPv6Chunk(leftSection)); + const rightBuffer = Buffer.from(parseIPv6Chunk(rightSection)); + const middleBuffer = Buffer.alloc(16 - leftBuffer.length - rightBuffer.length, 0); + return Buffer.concat([leftBuffer, middleBuffer, rightBuffer]); + } + else { + return null; + } +} +function connectivityStateToMessage(state) { + switch (state) { + case connectivity_state_1.ConnectivityState.CONNECTING: + return { + state: 'CONNECTING', + }; + case connectivity_state_1.ConnectivityState.IDLE: + return { + state: 'IDLE', + }; + case connectivity_state_1.ConnectivityState.READY: + return { + state: 'READY', + }; + case connectivity_state_1.ConnectivityState.SHUTDOWN: + return { + state: 'SHUTDOWN', + }; + case connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE: + return { + state: 'TRANSIENT_FAILURE', + }; + default: + return { + state: 'UNKNOWN', + }; + } +} +function dateToProtoTimestamp(date) { + if (!date) { + return null; + } + const millisSinceEpoch = date.getTime(); + return { + seconds: (millisSinceEpoch / 1000) | 0, + nanos: (millisSinceEpoch % 1000) * 1000000, + }; +} +function getChannelMessage(channelEntry) { + const resolvedInfo = channelEntry.getInfo(); + const channelRef = []; + const subchannelRef = []; + resolvedInfo.children.channels.forEach(el => { + channelRef.push(channelRefToMessage(el[1].ref)); + }); + resolvedInfo.children.subchannels.forEach(el => { + subchannelRef.push(subchannelRefToMessage(el[1].ref)); + }); + return { + ref: channelRefToMessage(channelEntry.ref), + data: { + target: resolvedInfo.target, + state: connectivityStateToMessage(resolvedInfo.state), + calls_started: resolvedInfo.callTracker.callsStarted, + calls_succeeded: resolvedInfo.callTracker.callsSucceeded, + calls_failed: resolvedInfo.callTracker.callsFailed, + last_call_started_timestamp: dateToProtoTimestamp(resolvedInfo.callTracker.lastCallStartedTimestamp), + trace: resolvedInfo.trace.getTraceMessage(), + }, + channel_ref: channelRef, + subchannel_ref: subchannelRef, + }; +} +function GetChannel(call, callback) { + const channelId = parseInt(call.request.channel_id, 10); + const channelEntry = 
entityMaps["channel" /* EntityTypes.channel */].getElementByKey(channelId); + if (channelEntry === undefined) { + callback({ + code: constants_1.Status.NOT_FOUND, + details: 'No channel data found for id ' + channelId, + }); + return; + } + callback(null, { channel: getChannelMessage(channelEntry) }); +} +function GetTopChannels(call, callback) { + const maxResults = parseInt(call.request.max_results, 10) || DEFAULT_MAX_RESULTS; + const resultList = []; + const startId = parseInt(call.request.start_channel_id, 10); + const channelEntries = entityMaps["channel" /* EntityTypes.channel */]; + let i; + for (i = channelEntries.lowerBound(startId); !i.equals(channelEntries.end()) && resultList.length < maxResults; i = i.next()) { + resultList.push(getChannelMessage(i.pointer[1])); + } + callback(null, { + channel: resultList, + end: i.equals(channelEntries.end()), + }); +} +function getServerMessage(serverEntry) { + const resolvedInfo = serverEntry.getInfo(); + const listenSocket = []; + resolvedInfo.listenerChildren.sockets.forEach(el => { + listenSocket.push(socketRefToMessage(el[1].ref)); + }); + return { + ref: serverRefToMessage(serverEntry.ref), + data: { + calls_started: resolvedInfo.callTracker.callsStarted, + calls_succeeded: resolvedInfo.callTracker.callsSucceeded, + calls_failed: resolvedInfo.callTracker.callsFailed, + last_call_started_timestamp: dateToProtoTimestamp(resolvedInfo.callTracker.lastCallStartedTimestamp), + trace: resolvedInfo.trace.getTraceMessage(), + }, + listen_socket: listenSocket, + }; +} +function GetServer(call, callback) { + const serverId = parseInt(call.request.server_id, 10); + const serverEntries = entityMaps["server" /* EntityTypes.server */]; + const serverEntry = serverEntries.getElementByKey(serverId); + if (serverEntry === undefined) { + callback({ + code: constants_1.Status.NOT_FOUND, + details: 'No server data found for id ' + serverId, + }); + return; + } + callback(null, { server: getServerMessage(serverEntry) }); +} +function GetServers(call, callback) { + const maxResults = parseInt(call.request.max_results, 10) || DEFAULT_MAX_RESULTS; + const startId = parseInt(call.request.start_server_id, 10); + const serverEntries = entityMaps["server" /* EntityTypes.server */]; + const resultList = []; + let i; + for (i = serverEntries.lowerBound(startId); !i.equals(serverEntries.end()) && resultList.length < maxResults; i = i.next()) { + resultList.push(getServerMessage(i.pointer[1])); + } + callback(null, { + server: resultList, + end: i.equals(serverEntries.end()), + }); +} +function GetSubchannel(call, callback) { + const subchannelId = parseInt(call.request.subchannel_id, 10); + const subchannelEntry = entityMaps["subchannel" /* EntityTypes.subchannel */].getElementByKey(subchannelId); + if (subchannelEntry === undefined) { + callback({ + code: constants_1.Status.NOT_FOUND, + details: 'No subchannel data found for id ' + subchannelId, + }); + return; + } + const resolvedInfo = subchannelEntry.getInfo(); + const listenSocket = []; + resolvedInfo.children.sockets.forEach(el => { + listenSocket.push(socketRefToMessage(el[1].ref)); + }); + const subchannelMessage = { + ref: subchannelRefToMessage(subchannelEntry.ref), + data: { + target: resolvedInfo.target, + state: connectivityStateToMessage(resolvedInfo.state), + calls_started: resolvedInfo.callTracker.callsStarted, + calls_succeeded: resolvedInfo.callTracker.callsSucceeded, + calls_failed: resolvedInfo.callTracker.callsFailed, + last_call_started_timestamp: 
dateToProtoTimestamp(resolvedInfo.callTracker.lastCallStartedTimestamp), + trace: resolvedInfo.trace.getTraceMessage(), + }, + socket_ref: listenSocket, + }; + callback(null, { subchannel: subchannelMessage }); +} +function subchannelAddressToAddressMessage(subchannelAddress) { + var _a; + if ((0, subchannel_address_1.isTcpSubchannelAddress)(subchannelAddress)) { + return { + address: 'tcpip_address', + tcpip_address: { + ip_address: (_a = ipAddressStringToBuffer(subchannelAddress.host)) !== null && _a !== void 0 ? _a : undefined, + port: subchannelAddress.port, + }, + }; + } + else { + return { + address: 'uds_address', + uds_address: { + filename: subchannelAddress.path, + }, + }; + } +} +function GetSocket(call, callback) { + var _a, _b, _c, _d, _e; + const socketId = parseInt(call.request.socket_id, 10); + const socketEntry = entityMaps["socket" /* EntityTypes.socket */].getElementByKey(socketId); + if (socketEntry === undefined) { + callback({ + code: constants_1.Status.NOT_FOUND, + details: 'No socket data found for id ' + socketId, + }); + return; + } + const resolvedInfo = socketEntry.getInfo(); + const securityMessage = resolvedInfo.security + ? { + model: 'tls', + tls: { + cipher_suite: resolvedInfo.security.cipherSuiteStandardName + ? 'standard_name' + : 'other_name', + standard_name: (_a = resolvedInfo.security.cipherSuiteStandardName) !== null && _a !== void 0 ? _a : undefined, + other_name: (_b = resolvedInfo.security.cipherSuiteOtherName) !== null && _b !== void 0 ? _b : undefined, + local_certificate: (_c = resolvedInfo.security.localCertificate) !== null && _c !== void 0 ? _c : undefined, + remote_certificate: (_d = resolvedInfo.security.remoteCertificate) !== null && _d !== void 0 ? _d : undefined, + }, + } + : null; + const socketMessage = { + ref: socketRefToMessage(socketEntry.ref), + local: resolvedInfo.localAddress + ? subchannelAddressToAddressMessage(resolvedInfo.localAddress) + : null, + remote: resolvedInfo.remoteAddress + ? subchannelAddressToAddressMessage(resolvedInfo.remoteAddress) + : null, + remote_name: (_e = resolvedInfo.remoteName) !== null && _e !== void 0 ? _e : undefined, + security: securityMessage, + data: { + keep_alives_sent: resolvedInfo.keepAlivesSent, + streams_started: resolvedInfo.streamsStarted, + streams_succeeded: resolvedInfo.streamsSucceeded, + streams_failed: resolvedInfo.streamsFailed, + last_local_stream_created_timestamp: dateToProtoTimestamp(resolvedInfo.lastLocalStreamCreatedTimestamp), + last_remote_stream_created_timestamp: dateToProtoTimestamp(resolvedInfo.lastRemoteStreamCreatedTimestamp), + messages_received: resolvedInfo.messagesReceived, + messages_sent: resolvedInfo.messagesSent, + last_message_received_timestamp: dateToProtoTimestamp(resolvedInfo.lastMessageReceivedTimestamp), + last_message_sent_timestamp: dateToProtoTimestamp(resolvedInfo.lastMessageSentTimestamp), + local_flow_control_window: resolvedInfo.localFlowControlWindow + ? { value: resolvedInfo.localFlowControlWindow } + : null, + remote_flow_control_window: resolvedInfo.remoteFlowControlWindow + ? 
{ value: resolvedInfo.remoteFlowControlWindow } + : null, + }, + }; + callback(null, { socket: socketMessage }); +} +function GetServerSockets(call, callback) { + const serverId = parseInt(call.request.server_id, 10); + const serverEntry = entityMaps["server" /* EntityTypes.server */].getElementByKey(serverId); + if (serverEntry === undefined) { + callback({ + code: constants_1.Status.NOT_FOUND, + details: 'No server data found for id ' + serverId, + }); + return; + } + const startId = parseInt(call.request.start_socket_id, 10); + const maxResults = parseInt(call.request.max_results, 10) || DEFAULT_MAX_RESULTS; + const resolvedInfo = serverEntry.getInfo(); + // If we wanted to include listener sockets in the result, this line would + // instead say + // const allSockets = resolvedInfo.listenerChildren.sockets.concat(resolvedInfo.sessionChildren.sockets).sort((ref1, ref2) => ref1.id - ref2.id); + const allSockets = resolvedInfo.sessionChildren.sockets; + const resultList = []; + let i; + for (i = allSockets.lowerBound(startId); !i.equals(allSockets.end()) && resultList.length < maxResults; i = i.next()) { + resultList.push(socketRefToMessage(i.pointer[1].ref)); + } + callback(null, { + socket_ref: resultList, + end: i.equals(allSockets.end()), + }); +} +function getChannelzHandlers() { + return { + GetChannel, + GetTopChannels, + GetServer, + GetServers, + GetSubchannel, + GetSocket, + GetServerSockets, + }; +} +exports.getChannelzHandlers = getChannelzHandlers; +let loadedChannelzDefinition = null; +function getChannelzServiceDefinition() { + if (loadedChannelzDefinition) { + return loadedChannelzDefinition; + } + /* The purpose of this complexity is to avoid loading @grpc/proto-loader at + * runtime for users who will not use/enable channelz. */ + const loaderLoadSync = (__nccwpck_require__(98171).loadSync); + const loadedProto = loaderLoadSync('channelz.proto', { + keepCase: true, + longs: String, + enums: String, + defaults: true, + oneofs: true, + includeDirs: [__nccwpck_require__.ab + "proto"], + }); + const channelzGrpcObject = (0, make_client_1.loadPackageDefinition)(loadedProto); + loadedChannelzDefinition = + channelzGrpcObject.grpc.channelz.v1.Channelz.service; + return loadedChannelzDefinition; +} +exports.getChannelzServiceDefinition = getChannelzServiceDefinition; +function setup() { + (0, admin_1.registerAdminService)(getChannelzServiceDefinition, getChannelzHandlers); +} +exports.setup = setup; +//# sourceMappingURL=channelz.js.map + +/***/ }), + +/***/ 26597: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getInterceptingCall = exports.InterceptingCall = exports.RequesterBuilder = exports.ListenerBuilder = exports.InterceptorConfigurationError = void 0; +const metadata_1 = __nccwpck_require__(83665); +const call_interface_1 = __nccwpck_require__(78710); +const constants_1 = __nccwpck_require__(90634); +const error_1 = __nccwpck_require__(22336); +/** + * Error class associated with passing both interceptors and interceptor + * providers to a client constructor or as call options. + */ +class InterceptorConfigurationError extends Error { + constructor(message) { + super(message); + this.name = 'InterceptorConfigurationError'; + Error.captureStackTrace(this, InterceptorConfigurationError); + } +} +exports.InterceptorConfigurationError = InterceptorConfigurationError; +class ListenerBuilder { + constructor() { + this.metadata = undefined; + this.message = undefined; + this.status = undefined; + } + withOnReceiveMetadata(onReceiveMetadata) { + this.metadata = onReceiveMetadata; + return this; + } + withOnReceiveMessage(onReceiveMessage) { + this.message = onReceiveMessage; + return this; + } + withOnReceiveStatus(onReceiveStatus) { + this.status = onReceiveStatus; + return this; + } + build() { + return { + onReceiveMetadata: this.metadata, + onReceiveMessage: this.message, + onReceiveStatus: this.status, + }; + } +} +exports.ListenerBuilder = ListenerBuilder; +class RequesterBuilder { + constructor() { + this.start = undefined; + this.message = undefined; + this.halfClose = undefined; + this.cancel = undefined; + } + withStart(start) { + this.start = start; + return this; + } + withSendMessage(sendMessage) { + this.message = sendMessage; + return this; + } + withHalfClose(halfClose) { + this.halfClose = halfClose; + return this; + } + withCancel(cancel) { + this.cancel = cancel; + return this; + } + build() { + return { + start: this.start, + sendMessage: this.message, + halfClose: this.halfClose, + cancel: this.cancel, + }; + } +} +exports.RequesterBuilder = RequesterBuilder; +/** + * A Listener with a default pass-through implementation of each method. Used + * for filling out Listeners with some methods omitted. + */ +const defaultListener = { + onReceiveMetadata: (metadata, next) => { + next(metadata); + }, + onReceiveMessage: (message, next) => { + next(message); + }, + onReceiveStatus: (status, next) => { + next(status); + }, +}; +/** + * A Requester with a default pass-through implementation of each method. Used + * for filling out Requesters with some methods omitted. 
+ */ +const defaultRequester = { + start: (metadata, listener, next) => { + next(metadata, listener); + }, + sendMessage: (message, next) => { + next(message); + }, + halfClose: next => { + next(); + }, + cancel: next => { + next(); + }, +}; +class InterceptingCall { + constructor(nextCall, requester) { + var _a, _b, _c, _d; + this.nextCall = nextCall; + /** + * Indicates that metadata has been passed to the requester's start + * method but it has not been passed to the corresponding next callback + */ + this.processingMetadata = false; + /** + * Message context for a pending message that is waiting for + */ + this.pendingMessageContext = null; + /** + * Indicates that a message has been passed to the requester's sendMessage + * method but it has not been passed to the corresponding next callback + */ + this.processingMessage = false; + /** + * Indicates that a status was received but could not be propagated because + * a message was still being processed. + */ + this.pendingHalfClose = false; + if (requester) { + this.requester = { + start: (_a = requester.start) !== null && _a !== void 0 ? _a : defaultRequester.start, + sendMessage: (_b = requester.sendMessage) !== null && _b !== void 0 ? _b : defaultRequester.sendMessage, + halfClose: (_c = requester.halfClose) !== null && _c !== void 0 ? _c : defaultRequester.halfClose, + cancel: (_d = requester.cancel) !== null && _d !== void 0 ? _d : defaultRequester.cancel, + }; + } + else { + this.requester = defaultRequester; + } + } + cancelWithStatus(status, details) { + this.requester.cancel(() => { + this.nextCall.cancelWithStatus(status, details); + }); + } + getPeer() { + return this.nextCall.getPeer(); + } + processPendingMessage() { + if (this.pendingMessageContext) { + this.nextCall.sendMessageWithContext(this.pendingMessageContext, this.pendingMessage); + this.pendingMessageContext = null; + this.pendingMessage = null; + } + } + processPendingHalfClose() { + if (this.pendingHalfClose) { + this.nextCall.halfClose(); + } + } + start(metadata, interceptingListener) { + var _a, _b, _c, _d, _e, _f; + const fullInterceptingListener = { + onReceiveMetadata: (_b = (_a = interceptingListener === null || interceptingListener === void 0 ? void 0 : interceptingListener.onReceiveMetadata) === null || _a === void 0 ? void 0 : _a.bind(interceptingListener)) !== null && _b !== void 0 ? _b : (metadata => { }), + onReceiveMessage: (_d = (_c = interceptingListener === null || interceptingListener === void 0 ? void 0 : interceptingListener.onReceiveMessage) === null || _c === void 0 ? void 0 : _c.bind(interceptingListener)) !== null && _d !== void 0 ? _d : (message => { }), + onReceiveStatus: (_f = (_e = interceptingListener === null || interceptingListener === void 0 ? void 0 : interceptingListener.onReceiveStatus) === null || _e === void 0 ? void 0 : _e.bind(interceptingListener)) !== null && _f !== void 0 ? _f : (status => { }), + }; + this.processingMetadata = true; + this.requester.start(metadata, fullInterceptingListener, (md, listener) => { + var _a, _b, _c; + this.processingMetadata = false; + let finalInterceptingListener; + if ((0, call_interface_1.isInterceptingListener)(listener)) { + finalInterceptingListener = listener; + } + else { + const fullListener = { + onReceiveMetadata: (_a = listener.onReceiveMetadata) !== null && _a !== void 0 ? _a : defaultListener.onReceiveMetadata, + onReceiveMessage: (_b = listener.onReceiveMessage) !== null && _b !== void 0 ? 
_b : defaultListener.onReceiveMessage, + onReceiveStatus: (_c = listener.onReceiveStatus) !== null && _c !== void 0 ? _c : defaultListener.onReceiveStatus, + }; + finalInterceptingListener = new call_interface_1.InterceptingListenerImpl(fullListener, fullInterceptingListener); + } + this.nextCall.start(md, finalInterceptingListener); + this.processPendingMessage(); + this.processPendingHalfClose(); + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sendMessageWithContext(context, message) { + this.processingMessage = true; + this.requester.sendMessage(message, finalMessage => { + this.processingMessage = false; + if (this.processingMetadata) { + this.pendingMessageContext = context; + this.pendingMessage = message; + } + else { + this.nextCall.sendMessageWithContext(context, finalMessage); + this.processPendingHalfClose(); + } + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sendMessage(message) { + this.sendMessageWithContext({}, message); + } + startRead() { + this.nextCall.startRead(); + } + halfClose() { + this.requester.halfClose(() => { + if (this.processingMetadata || this.processingMessage) { + this.pendingHalfClose = true; + } + else { + this.nextCall.halfClose(); + } + }); + } +} +exports.InterceptingCall = InterceptingCall; +function getCall(channel, path, options) { + var _a, _b; + const deadline = (_a = options.deadline) !== null && _a !== void 0 ? _a : Infinity; + const host = options.host; + const parent = (_b = options.parent) !== null && _b !== void 0 ? _b : null; + const propagateFlags = options.propagate_flags; + const credentials = options.credentials; + const call = channel.createCall(path, deadline, host, parent, propagateFlags); + if (credentials) { + call.setCredentials(credentials); + } + return call; +} +/** + * InterceptingCall implementation that directly owns the underlying Call + * object and handles serialization and deseraizliation. + */ +class BaseInterceptingCall { + constructor(call, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodDefinition) { + this.call = call; + this.methodDefinition = methodDefinition; + } + cancelWithStatus(status, details) { + this.call.cancelWithStatus(status, details); + } + getPeer() { + return this.call.getPeer(); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sendMessageWithContext(context, message) { + let serialized; + try { + serialized = this.methodDefinition.requestSerialize(message); + } + catch (e) { + this.call.cancelWithStatus(constants_1.Status.INTERNAL, `Request message serialization failure: ${(0, error_1.getErrorMessage)(e)}`); + return; + } + this.call.sendMessageWithContext(context, serialized); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sendMessage(message) { + this.sendMessageWithContext({}, message); + } + start(metadata, interceptingListener) { + let readError = null; + this.call.start(metadata, { + onReceiveMetadata: metadata => { + var _a; + (_a = interceptingListener === null || interceptingListener === void 0 ? void 0 : interceptingListener.onReceiveMetadata) === null || _a === void 0 ? 
void 0 : _a.call(interceptingListener, metadata); + }, + onReceiveMessage: message => { + var _a; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let deserialized; + try { + deserialized = this.methodDefinition.responseDeserialize(message); + } + catch (e) { + readError = { + code: constants_1.Status.INTERNAL, + details: `Response message parsing error: ${(0, error_1.getErrorMessage)(e)}`, + metadata: new metadata_1.Metadata(), + }; + this.call.cancelWithStatus(readError.code, readError.details); + return; + } + (_a = interceptingListener === null || interceptingListener === void 0 ? void 0 : interceptingListener.onReceiveMessage) === null || _a === void 0 ? void 0 : _a.call(interceptingListener, deserialized); + }, + onReceiveStatus: status => { + var _a, _b; + if (readError) { + (_a = interceptingListener === null || interceptingListener === void 0 ? void 0 : interceptingListener.onReceiveStatus) === null || _a === void 0 ? void 0 : _a.call(interceptingListener, readError); + } + else { + (_b = interceptingListener === null || interceptingListener === void 0 ? void 0 : interceptingListener.onReceiveStatus) === null || _b === void 0 ? void 0 : _b.call(interceptingListener, status); + } + }, + }); + } + startRead() { + this.call.startRead(); + } + halfClose() { + this.call.halfClose(); + } +} +/** + * BaseInterceptingCall with special-cased behavior for methods with unary + * responses. + */ +class BaseUnaryInterceptingCall extends BaseInterceptingCall { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + constructor(call, methodDefinition) { + super(call, methodDefinition); + } + start(metadata, listener) { + var _a, _b; + let receivedMessage = false; + const wrapperListener = { + onReceiveMetadata: (_b = (_a = listener === null || listener === void 0 ? void 0 : listener.onReceiveMetadata) === null || _a === void 0 ? void 0 : _a.bind(listener)) !== null && _b !== void 0 ? _b : (metadata => { }), + // eslint-disable-next-line @typescript-eslint/no-explicit-any + onReceiveMessage: (message) => { + var _a; + receivedMessage = true; + (_a = listener === null || listener === void 0 ? void 0 : listener.onReceiveMessage) === null || _a === void 0 ? void 0 : _a.call(listener, message); + }, + onReceiveStatus: (status) => { + var _a, _b; + if (!receivedMessage) { + (_a = listener === null || listener === void 0 ? void 0 : listener.onReceiveMessage) === null || _a === void 0 ? void 0 : _a.call(listener, null); + } + (_b = listener === null || listener === void 0 ? void 0 : listener.onReceiveStatus) === null || _b === void 0 ? void 0 : _b.call(listener, status); + }, + }; + super.start(metadata, wrapperListener); + this.call.startRead(); + } +} +/** + * BaseInterceptingCall with special-cased behavior for methods with streaming + * responses. 
+ */ +class BaseStreamingInterceptingCall extends BaseInterceptingCall { +} +function getBottomInterceptingCall(channel, options, +// eslint-disable-next-line @typescript-eslint/no-explicit-any +methodDefinition) { + const call = getCall(channel, methodDefinition.path, options); + if (methodDefinition.responseStream) { + return new BaseStreamingInterceptingCall(call, methodDefinition); + } + else { + return new BaseUnaryInterceptingCall(call, methodDefinition); + } +} +function getInterceptingCall(interceptorArgs, +// eslint-disable-next-line @typescript-eslint/no-explicit-any +methodDefinition, options, channel) { + if (interceptorArgs.clientInterceptors.length > 0 && + interceptorArgs.clientInterceptorProviders.length > 0) { + throw new InterceptorConfigurationError('Both interceptors and interceptor_providers were passed as options ' + + 'to the client constructor. Only one of these is allowed.'); + } + if (interceptorArgs.callInterceptors.length > 0 && + interceptorArgs.callInterceptorProviders.length > 0) { + throw new InterceptorConfigurationError('Both interceptors and interceptor_providers were passed as call ' + + 'options. Only one of these is allowed.'); + } + let interceptors = []; + // Interceptors passed to the call override interceptors passed to the client constructor + if (interceptorArgs.callInterceptors.length > 0 || + interceptorArgs.callInterceptorProviders.length > 0) { + interceptors = [] + .concat(interceptorArgs.callInterceptors, interceptorArgs.callInterceptorProviders.map(provider => provider(methodDefinition))) + .filter(interceptor => interceptor); + // Filter out falsy values when providers return nothing + } + else { + interceptors = [] + .concat(interceptorArgs.clientInterceptors, interceptorArgs.clientInterceptorProviders.map(provider => provider(methodDefinition))) + .filter(interceptor => interceptor); + // Filter out falsy values when providers return nothing + } + const interceptorOptions = Object.assign({}, options, { + method_definition: methodDefinition, + }); + /* For each interceptor in the list, the nextCall function passed to it is + * based on the next interceptor in the list, using a nextCall function + * constructed with the following interceptor in the list, and so on. The + * initialValue, which is effectively at the end of the list, is a nextCall + * function that invokes getBottomInterceptingCall, the result of which + * handles (de)serialization and also gets the underlying call from the + * channel. */ + const getCall = interceptors.reduceRight((nextCall, nextInterceptor) => { + return currentOptions => nextInterceptor(currentOptions, nextCall); + }, (finalOptions) => getBottomInterceptingCall(channel, finalOptions, methodDefinition)); + return getCall(interceptorOptions); +} +exports.getInterceptingCall = getInterceptingCall; +//# sourceMappingURL=client-interceptors.js.map + +/***/ }), + +/***/ 87172: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Client = void 0; +const call_1 = __nccwpck_require__(97453); +const channel_1 = __nccwpck_require__(13860); +const connectivity_state_1 = __nccwpck_require__(80878); +const constants_1 = __nccwpck_require__(90634); +const metadata_1 = __nccwpck_require__(83665); +const client_interceptors_1 = __nccwpck_require__(26597); +const CHANNEL_SYMBOL = Symbol(); +const INTERCEPTOR_SYMBOL = Symbol(); +const INTERCEPTOR_PROVIDER_SYMBOL = Symbol(); +const CALL_INVOCATION_TRANSFORMER_SYMBOL = Symbol(); +function isFunction(arg) { + return typeof arg === 'function'; +} +function getErrorStackString(error) { + var _a; + return ((_a = error.stack) === null || _a === void 0 ? void 0 : _a.split('\n').slice(1).join('\n')) || 'no stack trace available'; +} +/** + * A generic gRPC client. Primarily useful as a base class for all generated + * clients. + */ +class Client { + constructor(address, credentials, options = {}) { + var _a, _b; + options = Object.assign({}, options); + this[INTERCEPTOR_SYMBOL] = (_a = options.interceptors) !== null && _a !== void 0 ? _a : []; + delete options.interceptors; + this[INTERCEPTOR_PROVIDER_SYMBOL] = (_b = options.interceptor_providers) !== null && _b !== void 0 ? _b : []; + delete options.interceptor_providers; + if (this[INTERCEPTOR_SYMBOL].length > 0 && + this[INTERCEPTOR_PROVIDER_SYMBOL].length > 0) { + throw new Error('Both interceptors and interceptor_providers were passed as options ' + + 'to the client constructor. Only one of these is allowed.'); + } + this[CALL_INVOCATION_TRANSFORMER_SYMBOL] = + options.callInvocationTransformer; + delete options.callInvocationTransformer; + if (options.channelOverride) { + this[CHANNEL_SYMBOL] = options.channelOverride; + } + else if (options.channelFactoryOverride) { + const channelFactoryOverride = options.channelFactoryOverride; + delete options.channelFactoryOverride; + this[CHANNEL_SYMBOL] = channelFactoryOverride(address, credentials, options); + } + else { + this[CHANNEL_SYMBOL] = new channel_1.ChannelImplementation(address, credentials, options); + } + } + close() { + this[CHANNEL_SYMBOL].close(); + } + getChannel() { + return this[CHANNEL_SYMBOL]; + } + waitForReady(deadline, callback) { + const checkState = (err) => { + if (err) { + callback(new Error('Failed to connect before the deadline')); + return; + } + let newState; + try { + newState = this[CHANNEL_SYMBOL].getConnectivityState(true); + } + catch (e) { + callback(new Error('The channel has been closed')); + return; + } + if (newState === connectivity_state_1.ConnectivityState.READY) { + callback(); + } + else { + try { + this[CHANNEL_SYMBOL].watchConnectivityState(newState, deadline, checkState); + } + catch (e) { + callback(new Error('The channel has been closed')); + } + } + }; + setImmediate(checkState); + } + checkOptionalUnaryResponseArguments(arg1, arg2, arg3) { + if (isFunction(arg1)) { + return { metadata: new metadata_1.Metadata(), options: {}, callback: arg1 }; + } + else if (isFunction(arg2)) { + if (arg1 instanceof metadata_1.Metadata) { + return { metadata: arg1, options: {}, callback: arg2 }; + } + else { + return { metadata: new metadata_1.Metadata(), options: arg1, callback: arg2 }; + } + } + else { + if (!(arg1 instanceof metadata_1.Metadata && + arg2 instanceof Object && + isFunction(arg3))) { + throw new Error('Incorrect arguments passed'); + } + return { 
metadata: arg1, options: arg2, callback: arg3 }; + } + } + makeUnaryRequest(method, serialize, deserialize, argument, metadata, options, callback) { + var _a, _b; + const checkedArguments = this.checkOptionalUnaryResponseArguments(metadata, options, callback); + const methodDefinition = { + path: method, + requestStream: false, + responseStream: false, + requestSerialize: serialize, + responseDeserialize: deserialize, + }; + let callProperties = { + argument: argument, + metadata: checkedArguments.metadata, + call: new call_1.ClientUnaryCallImpl(), + channel: this[CHANNEL_SYMBOL], + methodDefinition: methodDefinition, + callOptions: checkedArguments.options, + callback: checkedArguments.callback, + }; + if (this[CALL_INVOCATION_TRANSFORMER_SYMBOL]) { + callProperties = this[CALL_INVOCATION_TRANSFORMER_SYMBOL](callProperties); + } + const emitter = callProperties.call; + const interceptorArgs = { + clientInterceptors: this[INTERCEPTOR_SYMBOL], + clientInterceptorProviders: this[INTERCEPTOR_PROVIDER_SYMBOL], + callInterceptors: (_a = callProperties.callOptions.interceptors) !== null && _a !== void 0 ? _a : [], + callInterceptorProviders: (_b = callProperties.callOptions.interceptor_providers) !== null && _b !== void 0 ? _b : [], + }; + const call = (0, client_interceptors_1.getInterceptingCall)(interceptorArgs, callProperties.methodDefinition, callProperties.callOptions, callProperties.channel); + /* This needs to happen before the emitter is used. Unfortunately we can't + * enforce this with the type system. We need to construct this emitter + * before calling the CallInvocationTransformer, and we need to create the + * call after that. */ + emitter.call = call; + let responseMessage = null; + let receivedStatus = false; + let callerStackError = new Error(); + call.start(callProperties.metadata, { + onReceiveMetadata: metadata => { + emitter.emit('metadata', metadata); + }, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + onReceiveMessage(message) { + if (responseMessage !== null) { + call.cancelWithStatus(constants_1.Status.UNIMPLEMENTED, 'Too many responses received'); + } + responseMessage = message; + }, + onReceiveStatus(status) { + if (receivedStatus) { + return; + } + receivedStatus = true; + if (status.code === constants_1.Status.OK) { + if (responseMessage === null) { + const callerStack = getErrorStackString(callerStackError); + callProperties.callback((0, call_1.callErrorFromStatus)({ + code: constants_1.Status.UNIMPLEMENTED, + details: 'No message received', + metadata: status.metadata, + }, callerStack)); + } + else { + callProperties.callback(null, responseMessage); + } + } + else { + const callerStack = getErrorStackString(callerStackError); + callProperties.callback((0, call_1.callErrorFromStatus)(status, callerStack)); + } + /* Avoid retaining the callerStackError object in the call context of + * the status event handler. 
*/ + callerStackError = null; + emitter.emit('status', status); + }, + }); + call.sendMessage(argument); + call.halfClose(); + return emitter; + } + makeClientStreamRequest(method, serialize, deserialize, metadata, options, callback) { + var _a, _b; + const checkedArguments = this.checkOptionalUnaryResponseArguments(metadata, options, callback); + const methodDefinition = { + path: method, + requestStream: true, + responseStream: false, + requestSerialize: serialize, + responseDeserialize: deserialize, + }; + let callProperties = { + metadata: checkedArguments.metadata, + call: new call_1.ClientWritableStreamImpl(serialize), + channel: this[CHANNEL_SYMBOL], + methodDefinition: methodDefinition, + callOptions: checkedArguments.options, + callback: checkedArguments.callback, + }; + if (this[CALL_INVOCATION_TRANSFORMER_SYMBOL]) { + callProperties = this[CALL_INVOCATION_TRANSFORMER_SYMBOL](callProperties); + } + const emitter = callProperties.call; + const interceptorArgs = { + clientInterceptors: this[INTERCEPTOR_SYMBOL], + clientInterceptorProviders: this[INTERCEPTOR_PROVIDER_SYMBOL], + callInterceptors: (_a = callProperties.callOptions.interceptors) !== null && _a !== void 0 ? _a : [], + callInterceptorProviders: (_b = callProperties.callOptions.interceptor_providers) !== null && _b !== void 0 ? _b : [], + }; + const call = (0, client_interceptors_1.getInterceptingCall)(interceptorArgs, callProperties.methodDefinition, callProperties.callOptions, callProperties.channel); + /* This needs to happen before the emitter is used. Unfortunately we can't + * enforce this with the type system. We need to construct this emitter + * before calling the CallInvocationTransformer, and we need to create the + * call after that. */ + emitter.call = call; + let responseMessage = null; + let receivedStatus = false; + let callerStackError = new Error(); + call.start(callProperties.metadata, { + onReceiveMetadata: metadata => { + emitter.emit('metadata', metadata); + }, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + onReceiveMessage(message) { + if (responseMessage !== null) { + call.cancelWithStatus(constants_1.Status.UNIMPLEMENTED, 'Too many responses received'); + } + responseMessage = message; + call.startRead(); + }, + onReceiveStatus(status) { + if (receivedStatus) { + return; + } + receivedStatus = true; + if (status.code === constants_1.Status.OK) { + if (responseMessage === null) { + const callerStack = getErrorStackString(callerStackError); + callProperties.callback((0, call_1.callErrorFromStatus)({ + code: constants_1.Status.UNIMPLEMENTED, + details: 'No message received', + metadata: status.metadata, + }, callerStack)); + } + else { + callProperties.callback(null, responseMessage); + } + } + else { + const callerStack = getErrorStackString(callerStackError); + callProperties.callback((0, call_1.callErrorFromStatus)(status, callerStack)); + } + /* Avoid retaining the callerStackError object in the call context of + * the status event handler. 
*/ + callerStackError = null; + emitter.emit('status', status); + }, + }); + return emitter; + } + checkMetadataAndOptions(arg1, arg2) { + let metadata; + let options; + if (arg1 instanceof metadata_1.Metadata) { + metadata = arg1; + if (arg2) { + options = arg2; + } + else { + options = {}; + } + } + else { + if (arg1) { + options = arg1; + } + else { + options = {}; + } + metadata = new metadata_1.Metadata(); + } + return { metadata, options }; + } + makeServerStreamRequest(method, serialize, deserialize, argument, metadata, options) { + var _a, _b; + const checkedArguments = this.checkMetadataAndOptions(metadata, options); + const methodDefinition = { + path: method, + requestStream: false, + responseStream: true, + requestSerialize: serialize, + responseDeserialize: deserialize, + }; + let callProperties = { + argument: argument, + metadata: checkedArguments.metadata, + call: new call_1.ClientReadableStreamImpl(deserialize), + channel: this[CHANNEL_SYMBOL], + methodDefinition: methodDefinition, + callOptions: checkedArguments.options, + }; + if (this[CALL_INVOCATION_TRANSFORMER_SYMBOL]) { + callProperties = this[CALL_INVOCATION_TRANSFORMER_SYMBOL](callProperties); + } + const stream = callProperties.call; + const interceptorArgs = { + clientInterceptors: this[INTERCEPTOR_SYMBOL], + clientInterceptorProviders: this[INTERCEPTOR_PROVIDER_SYMBOL], + callInterceptors: (_a = callProperties.callOptions.interceptors) !== null && _a !== void 0 ? _a : [], + callInterceptorProviders: (_b = callProperties.callOptions.interceptor_providers) !== null && _b !== void 0 ? _b : [], + }; + const call = (0, client_interceptors_1.getInterceptingCall)(interceptorArgs, callProperties.methodDefinition, callProperties.callOptions, callProperties.channel); + /* This needs to happen before the emitter is used. Unfortunately we can't + * enforce this with the type system. We need to construct this emitter + * before calling the CallInvocationTransformer, and we need to create the + * call after that. */ + stream.call = call; + let receivedStatus = false; + let callerStackError = new Error(); + call.start(callProperties.metadata, { + onReceiveMetadata(metadata) { + stream.emit('metadata', metadata); + }, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + onReceiveMessage(message) { + stream.push(message); + }, + onReceiveStatus(status) { + if (receivedStatus) { + return; + } + receivedStatus = true; + stream.push(null); + if (status.code !== constants_1.Status.OK) { + const callerStack = getErrorStackString(callerStackError); + stream.emit('error', (0, call_1.callErrorFromStatus)(status, callerStack)); + } + /* Avoid retaining the callerStackError object in the call context of + * the status event handler. 
*/ + callerStackError = null; + stream.emit('status', status); + }, + }); + call.sendMessage(argument); + call.halfClose(); + return stream; + } + makeBidiStreamRequest(method, serialize, deserialize, metadata, options) { + var _a, _b; + const checkedArguments = this.checkMetadataAndOptions(metadata, options); + const methodDefinition = { + path: method, + requestStream: true, + responseStream: true, + requestSerialize: serialize, + responseDeserialize: deserialize, + }; + let callProperties = { + metadata: checkedArguments.metadata, + call: new call_1.ClientDuplexStreamImpl(serialize, deserialize), + channel: this[CHANNEL_SYMBOL], + methodDefinition: methodDefinition, + callOptions: checkedArguments.options, + }; + if (this[CALL_INVOCATION_TRANSFORMER_SYMBOL]) { + callProperties = this[CALL_INVOCATION_TRANSFORMER_SYMBOL](callProperties); + } + const stream = callProperties.call; + const interceptorArgs = { + clientInterceptors: this[INTERCEPTOR_SYMBOL], + clientInterceptorProviders: this[INTERCEPTOR_PROVIDER_SYMBOL], + callInterceptors: (_a = callProperties.callOptions.interceptors) !== null && _a !== void 0 ? _a : [], + callInterceptorProviders: (_b = callProperties.callOptions.interceptor_providers) !== null && _b !== void 0 ? _b : [], + }; + const call = (0, client_interceptors_1.getInterceptingCall)(interceptorArgs, callProperties.methodDefinition, callProperties.callOptions, callProperties.channel); + /* This needs to happen before the emitter is used. Unfortunately we can't + * enforce this with the type system. We need to construct this emitter + * before calling the CallInvocationTransformer, and we need to create the + * call after that. */ + stream.call = call; + let receivedStatus = false; + let callerStackError = new Error(); + call.start(callProperties.metadata, { + onReceiveMetadata(metadata) { + stream.emit('metadata', metadata); + }, + onReceiveMessage(message) { + stream.push(message); + }, + onReceiveStatus(status) { + if (receivedStatus) { + return; + } + receivedStatus = true; + stream.push(null); + if (status.code !== constants_1.Status.OK) { + const callerStack = getErrorStackString(callerStackError); + stream.emit('error', (0, call_1.callErrorFromStatus)(status, callerStack)); + } + /* Avoid retaining the callerStackError object in the call context of + * the status event handler. */ + callerStackError = null; + stream.emit('status', status); + }, + }); + return stream; + } +} +exports.Client = Client; +//# sourceMappingURL=client.js.map + +/***/ }), + +/***/ 54789: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2021 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CompressionAlgorithms = void 0; +var CompressionAlgorithms; +(function (CompressionAlgorithms) { + CompressionAlgorithms[CompressionAlgorithms["identity"] = 0] = "identity"; + CompressionAlgorithms[CompressionAlgorithms["deflate"] = 1] = "deflate"; + CompressionAlgorithms[CompressionAlgorithms["gzip"] = 2] = "gzip"; +})(CompressionAlgorithms || (exports.CompressionAlgorithms = CompressionAlgorithms = {})); +//# sourceMappingURL=compression-algorithms.js.map + +/***/ }), + +/***/ 47616: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CompressionFilterFactory = exports.CompressionFilter = void 0; +const zlib = __nccwpck_require__(59796); +const compression_algorithms_1 = __nccwpck_require__(54789); +const constants_1 = __nccwpck_require__(90634); +const filter_1 = __nccwpck_require__(43392); +const logging = __nccwpck_require__(35993); +const isCompressionAlgorithmKey = (key) => { + return (typeof key === 'number' && typeof compression_algorithms_1.CompressionAlgorithms[key] === 'string'); +}; +class CompressionHandler { + /** + * @param message Raw uncompressed message bytes + * @param compress Indicates whether the message should be compressed + * @return Framed message, compressed if applicable + */ + async writeMessage(message, compress) { + let messageBuffer = message; + if (compress) { + messageBuffer = await this.compressMessage(messageBuffer); + } + const output = Buffer.allocUnsafe(messageBuffer.length + 5); + output.writeUInt8(compress ? 
1 : 0, 0); + output.writeUInt32BE(messageBuffer.length, 1); + messageBuffer.copy(output, 5); + return output; + } + /** + * @param data Framed message, possibly compressed + * @return Uncompressed message + */ + async readMessage(data) { + const compressed = data.readUInt8(0) === 1; + let messageBuffer = data.slice(5); + if (compressed) { + messageBuffer = await this.decompressMessage(messageBuffer); + } + return messageBuffer; + } +} +class IdentityHandler extends CompressionHandler { + async compressMessage(message) { + return message; + } + async writeMessage(message, compress) { + const output = Buffer.allocUnsafe(message.length + 5); + /* With "identity" compression, messages should always be marked as + * uncompressed */ + output.writeUInt8(0, 0); + output.writeUInt32BE(message.length, 1); + message.copy(output, 5); + return output; + } + decompressMessage(message) { + return Promise.reject(new Error('Received compressed message but "grpc-encoding" header was identity')); + } +} +class DeflateHandler extends CompressionHandler { + constructor(maxRecvMessageLength) { + super(); + this.maxRecvMessageLength = maxRecvMessageLength; + } + compressMessage(message) { + return new Promise((resolve, reject) => { + zlib.deflate(message, (err, output) => { + if (err) { + reject(err); + } + else { + resolve(output); + } + }); + }); + } + decompressMessage(message) { + return new Promise((resolve, reject) => { + let totalLength = 0; + const messageParts = []; + const decompresser = zlib.createInflate(); + decompresser.on('data', (chunk) => { + messageParts.push(chunk); + totalLength += chunk.byteLength; + if (this.maxRecvMessageLength !== -1 && totalLength > this.maxRecvMessageLength) { + decompresser.destroy(); + reject({ + code: constants_1.Status.RESOURCE_EXHAUSTED, + details: `Received message that decompresses to a size larger than ${this.maxRecvMessageLength}` + }); + } + }); + decompresser.on('end', () => { + resolve(Buffer.concat(messageParts)); + }); + decompresser.write(message); + decompresser.end(); + }); + } +} +class GzipHandler extends CompressionHandler { + constructor(maxRecvMessageLength) { + super(); + this.maxRecvMessageLength = maxRecvMessageLength; + } + compressMessage(message) { + return new Promise((resolve, reject) => { + zlib.gzip(message, (err, output) => { + if (err) { + reject(err); + } + else { + resolve(output); + } + }); + }); + } + decompressMessage(message) { + return new Promise((resolve, reject) => { + let totalLength = 0; + const messageParts = []; + const decompresser = zlib.createGunzip(); + decompresser.on('data', (chunk) => { + messageParts.push(chunk); + totalLength += chunk.byteLength; + if (this.maxRecvMessageLength !== -1 && totalLength > this.maxRecvMessageLength) { + decompresser.destroy(); + reject({ + code: constants_1.Status.RESOURCE_EXHAUSTED, + details: `Received message that decompresses to a size larger than ${this.maxRecvMessageLength}` + }); + } + }); + decompresser.on('end', () => { + resolve(Buffer.concat(messageParts)); + }); + decompresser.write(message); + decompresser.end(); + }); + } +} +class UnknownHandler extends CompressionHandler { + constructor(compressionName) { + super(); + this.compressionName = compressionName; + } + compressMessage(message) { + return Promise.reject(new Error(`Received message compressed with unsupported compression method ${this.compressionName}`)); + } + decompressMessage(message) { + // This should be unreachable + return Promise.reject(new Error(`Compression method not supported: 
${this.compressionName}`)); + } +} +function getCompressionHandler(compressionName, maxReceiveMessageSize) { + switch (compressionName) { + case 'identity': + return new IdentityHandler(); + case 'deflate': + return new DeflateHandler(maxReceiveMessageSize); + case 'gzip': + return new GzipHandler(maxReceiveMessageSize); + default: + return new UnknownHandler(compressionName); + } +} +class CompressionFilter extends filter_1.BaseFilter { + constructor(channelOptions, sharedFilterConfig) { + var _a, _b, _c; + super(); + this.sharedFilterConfig = sharedFilterConfig; + this.sendCompression = new IdentityHandler(); + this.receiveCompression = new IdentityHandler(); + this.currentCompressionAlgorithm = 'identity'; + const compressionAlgorithmKey = channelOptions['grpc.default_compression_algorithm']; + this.maxReceiveMessageLength = (_a = channelOptions['grpc.max_receive_message_length']) !== null && _a !== void 0 ? _a : constants_1.DEFAULT_MAX_RECEIVE_MESSAGE_LENGTH; + this.maxSendMessageLength = (_b = channelOptions['grpc.max_send_message_length']) !== null && _b !== void 0 ? _b : constants_1.DEFAULT_MAX_SEND_MESSAGE_LENGTH; + if (compressionAlgorithmKey !== undefined) { + if (isCompressionAlgorithmKey(compressionAlgorithmKey)) { + const clientSelectedEncoding = compression_algorithms_1.CompressionAlgorithms[compressionAlgorithmKey]; + const serverSupportedEncodings = (_c = sharedFilterConfig.serverSupportedEncodingHeader) === null || _c === void 0 ? void 0 : _c.split(','); + /** + * There are two possible situations here: + * 1) We don't have any info yet from the server about what compression it supports + * In that case we should just use what the client tells us to use + * 2) We've previously received a response from the server including a grpc-accept-encoding header + * In that case we only want to use the encoding chosen by the client if the server supports it + */ + if (!serverSupportedEncodings || + serverSupportedEncodings.includes(clientSelectedEncoding)) { + this.currentCompressionAlgorithm = clientSelectedEncoding; + this.sendCompression = getCompressionHandler(this.currentCompressionAlgorithm, -1); + } + } + else { + logging.log(constants_1.LogVerbosity.ERROR, `Invalid value provided for grpc.default_compression_algorithm option: ${compressionAlgorithmKey}`); + } + } + } + async sendMetadata(metadata) { + const headers = await metadata; + headers.set('grpc-accept-encoding', 'identity,deflate,gzip'); + headers.set('accept-encoding', 'identity'); + // No need to send the header if it's "identity" - behavior is identical; save the bandwidth + if (this.currentCompressionAlgorithm === 'identity') { + headers.remove('grpc-encoding'); + } + else { + headers.set('grpc-encoding', this.currentCompressionAlgorithm); + } + return headers; + } + receiveMetadata(metadata) { + const receiveEncoding = metadata.get('grpc-encoding'); + if (receiveEncoding.length > 0) { + const encoding = receiveEncoding[0]; + if (typeof encoding === 'string') { + this.receiveCompression = getCompressionHandler(encoding, this.maxReceiveMessageLength); + } + } + metadata.remove('grpc-encoding'); + /* Check to see if the compression we're using to send messages is supported by the server + * If not, reset the sendCompression filter and have it use the default IdentityHandler */ + const serverSupportedEncodingsHeader = metadata.get('grpc-accept-encoding')[0]; + if (serverSupportedEncodingsHeader) { + this.sharedFilterConfig.serverSupportedEncodingHeader = + serverSupportedEncodingsHeader; + const 
serverSupportedEncodings = serverSupportedEncodingsHeader.split(','); + if (!serverSupportedEncodings.includes(this.currentCompressionAlgorithm)) { + this.sendCompression = new IdentityHandler(); + this.currentCompressionAlgorithm = 'identity'; + } + } + metadata.remove('grpc-accept-encoding'); + return metadata; + } + async sendMessage(message) { + var _a; + /* This filter is special. The input message is the bare message bytes, + * and the output is a framed and possibly compressed message. For this + * reason, this filter should be at the bottom of the filter stack */ + const resolvedMessage = await message; + if (this.maxSendMessageLength !== -1 && resolvedMessage.message.length > this.maxSendMessageLength) { + throw { + code: constants_1.Status.RESOURCE_EXHAUSTED, + details: `Attempted to send message with a size larger than ${this.maxSendMessageLength}` + }; + } + let compress; + if (this.sendCompression instanceof IdentityHandler) { + compress = false; + } + else { + compress = (((_a = resolvedMessage.flags) !== null && _a !== void 0 ? _a : 0) & 2 /* WriteFlags.NoCompress */) === 0; + } + return { + message: await this.sendCompression.writeMessage(resolvedMessage.message, compress), + flags: resolvedMessage.flags, + }; + } + async receiveMessage(message) { + /* This filter is also special. The input message is framed and possibly + * compressed, and the output message is deframed and uncompressed. So + * this is another reason that this filter should be at the bottom of the + * filter stack. */ + return this.receiveCompression.readMessage(await message); + } +} +exports.CompressionFilter = CompressionFilter; +class CompressionFilterFactory { + constructor(channel, options) { + this.options = options; + this.sharedFilterConfig = {}; + } + createFilter() { + return new CompressionFilter(this.options, this.sharedFilterConfig); + } +} +exports.CompressionFilterFactory = CompressionFilterFactory; +//# sourceMappingURL=compression-filter.js.map + +/***/ }), + +/***/ 80878: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2021 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ConnectivityState = void 0; +var ConnectivityState; +(function (ConnectivityState) { + ConnectivityState[ConnectivityState["IDLE"] = 0] = "IDLE"; + ConnectivityState[ConnectivityState["CONNECTING"] = 1] = "CONNECTING"; + ConnectivityState[ConnectivityState["READY"] = 2] = "READY"; + ConnectivityState[ConnectivityState["TRANSIENT_FAILURE"] = 3] = "TRANSIENT_FAILURE"; + ConnectivityState[ConnectivityState["SHUTDOWN"] = 4] = "SHUTDOWN"; +})(ConnectivityState || (exports.ConnectivityState = ConnectivityState = {})); +//# sourceMappingURL=connectivity-state.js.map + +/***/ }), + +/***/ 90634: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DEFAULT_MAX_RECEIVE_MESSAGE_LENGTH = exports.DEFAULT_MAX_SEND_MESSAGE_LENGTH = exports.Propagate = exports.LogVerbosity = exports.Status = void 0; +var Status; +(function (Status) { + Status[Status["OK"] = 0] = "OK"; + Status[Status["CANCELLED"] = 1] = "CANCELLED"; + Status[Status["UNKNOWN"] = 2] = "UNKNOWN"; + Status[Status["INVALID_ARGUMENT"] = 3] = "INVALID_ARGUMENT"; + Status[Status["DEADLINE_EXCEEDED"] = 4] = "DEADLINE_EXCEEDED"; + Status[Status["NOT_FOUND"] = 5] = "NOT_FOUND"; + Status[Status["ALREADY_EXISTS"] = 6] = "ALREADY_EXISTS"; + Status[Status["PERMISSION_DENIED"] = 7] = "PERMISSION_DENIED"; + Status[Status["RESOURCE_EXHAUSTED"] = 8] = "RESOURCE_EXHAUSTED"; + Status[Status["FAILED_PRECONDITION"] = 9] = "FAILED_PRECONDITION"; + Status[Status["ABORTED"] = 10] = "ABORTED"; + Status[Status["OUT_OF_RANGE"] = 11] = "OUT_OF_RANGE"; + Status[Status["UNIMPLEMENTED"] = 12] = "UNIMPLEMENTED"; + Status[Status["INTERNAL"] = 13] = "INTERNAL"; + Status[Status["UNAVAILABLE"] = 14] = "UNAVAILABLE"; + Status[Status["DATA_LOSS"] = 15] = "DATA_LOSS"; + Status[Status["UNAUTHENTICATED"] = 16] = "UNAUTHENTICATED"; +})(Status || (exports.Status = Status = {})); +var LogVerbosity; +(function (LogVerbosity) { + LogVerbosity[LogVerbosity["DEBUG"] = 0] = "DEBUG"; + LogVerbosity[LogVerbosity["INFO"] = 1] = "INFO"; + LogVerbosity[LogVerbosity["ERROR"] = 2] = "ERROR"; + LogVerbosity[LogVerbosity["NONE"] = 3] = "NONE"; +})(LogVerbosity || (exports.LogVerbosity = LogVerbosity = {})); +/** + * NOTE: This enum is not currently used in any implemented API in this + * library. It is included only for type parity with the other implementation. + */ +var Propagate; +(function (Propagate) { + Propagate[Propagate["DEADLINE"] = 1] = "DEADLINE"; + Propagate[Propagate["CENSUS_STATS_CONTEXT"] = 2] = "CENSUS_STATS_CONTEXT"; + Propagate[Propagate["CENSUS_TRACING_CONTEXT"] = 4] = "CENSUS_TRACING_CONTEXT"; + Propagate[Propagate["CANCELLATION"] = 8] = "CANCELLATION"; + // https://github.com/grpc/grpc/blob/master/include/grpc/impl/codegen/propagation_bits.h#L43 + Propagate[Propagate["DEFAULTS"] = 65535] = "DEFAULTS"; +})(Propagate || (exports.Propagate = Propagate = {})); +// -1 means unlimited +exports.DEFAULT_MAX_SEND_MESSAGE_LENGTH = -1; +// 4 MB default +exports.DEFAULT_MAX_RECEIVE_MESSAGE_LENGTH = 4 * 1024 * 1024; +//# sourceMappingURL=constants.js.map + +/***/ }), + +/***/ 39129: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.restrictControlPlaneStatusCode = void 0; +const constants_1 = __nccwpck_require__(90634); +const INAPPROPRIATE_CONTROL_PLANE_CODES = [ + constants_1.Status.OK, + constants_1.Status.INVALID_ARGUMENT, + constants_1.Status.NOT_FOUND, + constants_1.Status.ALREADY_EXISTS, + constants_1.Status.FAILED_PRECONDITION, + constants_1.Status.ABORTED, + constants_1.Status.OUT_OF_RANGE, + constants_1.Status.DATA_LOSS, +]; +function restrictControlPlaneStatusCode(code, details) { + if (INAPPROPRIATE_CONTROL_PLANE_CODES.includes(code)) { + return { + code: constants_1.Status.INTERNAL, + details: `Invalid status from control plane: ${code} ${constants_1.Status[code]} ${details}`, + }; + } + else { + return { code, details }; + } +} +exports.restrictControlPlaneStatusCode = restrictControlPlaneStatusCode; +//# sourceMappingURL=control-plane-status.js.map + +/***/ }), + +/***/ 511: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.formatDateDifference = exports.deadlineToString = exports.getRelativeTimeout = exports.getDeadlineTimeoutString = exports.minDeadline = void 0; +function minDeadline(...deadlineList) { + let minValue = Infinity; + for (const deadline of deadlineList) { + const deadlineMsecs = deadline instanceof Date ? deadline.getTime() : deadline; + if (deadlineMsecs < minValue) { + minValue = deadlineMsecs; + } + } + return minValue; +} +exports.minDeadline = minDeadline; +const units = [ + ['m', 1], + ['S', 1000], + ['M', 60 * 1000], + ['H', 60 * 60 * 1000], +]; +function getDeadlineTimeoutString(deadline) { + const now = new Date().getTime(); + if (deadline instanceof Date) { + deadline = deadline.getTime(); + } + const timeoutMs = Math.max(deadline - now, 0); + for (const [unit, factor] of units) { + const amount = timeoutMs / factor; + if (amount < 1e8) { + return String(Math.ceil(amount)) + unit; + } + } + throw new Error('Deadline is too far in the future'); +} +exports.getDeadlineTimeoutString = getDeadlineTimeoutString; +/** + * See https://nodejs.org/api/timers.html#settimeoutcallback-delay-args + * In particular, "When delay is larger than 2147483647 or less than 1, the + * delay will be set to 1. Non-integer delays are truncated to an integer." + * This number of milliseconds is almost 25 days. 
+ */ +const MAX_TIMEOUT_TIME = 2147483647; +/** + * Get the timeout value that should be passed to setTimeout now for the timer + * to end at the deadline. For any deadline before now, the timer should end + * immediately, represented by a value of 0. For any deadline more than + * MAX_TIMEOUT_TIME milliseconds in the future, a timer cannot be set that will + * end at that time, so it is treated as infinitely far in the future. + * @param deadline + * @returns + */ +function getRelativeTimeout(deadline) { + const deadlineMs = deadline instanceof Date ? deadline.getTime() : deadline; + const now = new Date().getTime(); + const timeout = deadlineMs - now; + if (timeout < 0) { + return 0; + } + else if (timeout > MAX_TIMEOUT_TIME) { + return Infinity; + } + else { + return timeout; + } +} +exports.getRelativeTimeout = getRelativeTimeout; +function deadlineToString(deadline) { + if (deadline instanceof Date) { + return deadline.toISOString(); + } + else { + const dateDeadline = new Date(deadline); + if (Number.isNaN(dateDeadline.getTime())) { + return '' + deadline; + } + else { + return dateDeadline.toISOString(); + } + } +} +exports.deadlineToString = deadlineToString; +/** + * Calculate the difference between two dates as a number of seconds and format + * it as a string. + * @param startDate + * @param endDate + * @returns + */ +function formatDateDifference(startDate, endDate) { + return ((endDate.getTime() - startDate.getTime()) / 1000).toFixed(3) + 's'; +} +exports.formatDateDifference = formatDateDifference; +//# sourceMappingURL=deadline.js.map + +/***/ }), + +/***/ 62668: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseDuration = exports.isDuration = exports.durationToMs = exports.msToDuration = void 0; +function msToDuration(millis) { + return { + seconds: (millis / 1000) | 0, + nanos: ((millis % 1000) * 1000000) | 0, + }; +} +exports.msToDuration = msToDuration; +function durationToMs(duration) { + return (duration.seconds * 1000 + duration.nanos / 1000000) | 0; +} +exports.durationToMs = durationToMs; +function isDuration(value) { + return typeof value.seconds === 'number' && typeof value.nanos === 'number'; +} +exports.isDuration = isDuration; +const durationRegex = /^(\d+)(?:\.(\d+))?s$/; +function parseDuration(value) { + const match = value.match(durationRegex); + if (!match) { + return null; + } + return { + seconds: Number.parseInt(match[1], 10), + nanos: match[2] ? Number.parseInt(match[2].padEnd(9, '0'), 10) : 0 + }; +} +exports.parseDuration = parseDuration; +//# sourceMappingURL=duration.js.map + +/***/ }), + +/***/ 29160: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2024 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GRPC_NODE_USE_ALTERNATIVE_RESOLVER = void 0; +exports.GRPC_NODE_USE_ALTERNATIVE_RESOLVER = ((_a = process.env.GRPC_NODE_USE_ALTERNATIVE_RESOLVER) !== null && _a !== void 0 ? _a : 'false') === 'true'; +//# sourceMappingURL=environment.js.map + +/***/ }), + +/***/ 22336: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getErrorCode = exports.getErrorMessage = void 0; +function getErrorMessage(error) { + if (error instanceof Error) { + return error.message; + } + else { + return String(error); + } +} +exports.getErrorMessage = getErrorMessage; +function getErrorCode(error) { + if (typeof error === 'object' && + error !== null && + 'code' in error && + typeof error.code === 'number') { + return error.code; + } + else { + return null; + } +} +exports.getErrorCode = getErrorCode; +//# sourceMappingURL=error.js.map + +/***/ }), + +/***/ 37626: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createCertificateProviderChannelCredentials = exports.FileWatcherCertificateProvider = exports.createCertificateProviderServerCredentials = exports.createServerCredentialsWithInterceptors = exports.BaseSubchannelWrapper = exports.registerAdminService = exports.FilterStackFactory = exports.BaseFilter = exports.PickResultType = exports.QueuePicker = exports.UnavailablePicker = exports.ChildLoadBalancerHandler = exports.EndpointMap = exports.endpointHasAddress = exports.endpointToString = exports.subchannelAddressToString = exports.LeafLoadBalancer = exports.isLoadBalancerNameRegistered = exports.parseLoadBalancingConfig = exports.selectLbConfigFromList = exports.registerLoadBalancerType = exports.createChildChannelControlHelper = exports.BackoffTimeout = exports.parseDuration = exports.durationToMs = exports.splitHostPort = exports.uriToString = exports.createResolver = exports.registerResolver = exports.log = exports.trace = void 0; +var logging_1 = __nccwpck_require__(35993); +Object.defineProperty(exports, "trace", ({ enumerable: true, get: function () { return logging_1.trace; } })); +Object.defineProperty(exports, "log", ({ enumerable: true, get: function () { return logging_1.log; } })); +var resolver_1 = __nccwpck_require__(31594); +Object.defineProperty(exports, "registerResolver", ({ 
enumerable: true, get: function () { return resolver_1.registerResolver; } })); +Object.defineProperty(exports, "createResolver", ({ enumerable: true, get: function () { return resolver_1.createResolver; } })); +var uri_parser_1 = __nccwpck_require__(65974); +Object.defineProperty(exports, "uriToString", ({ enumerable: true, get: function () { return uri_parser_1.uriToString; } })); +Object.defineProperty(exports, "splitHostPort", ({ enumerable: true, get: function () { return uri_parser_1.splitHostPort; } })); +var duration_1 = __nccwpck_require__(62668); +Object.defineProperty(exports, "durationToMs", ({ enumerable: true, get: function () { return duration_1.durationToMs; } })); +Object.defineProperty(exports, "parseDuration", ({ enumerable: true, get: function () { return duration_1.parseDuration; } })); +var backoff_timeout_1 = __nccwpck_require__(34186); +Object.defineProperty(exports, "BackoffTimeout", ({ enumerable: true, get: function () { return backoff_timeout_1.BackoffTimeout; } })); +var load_balancer_1 = __nccwpck_require__(52680); +Object.defineProperty(exports, "createChildChannelControlHelper", ({ enumerable: true, get: function () { return load_balancer_1.createChildChannelControlHelper; } })); +Object.defineProperty(exports, "registerLoadBalancerType", ({ enumerable: true, get: function () { return load_balancer_1.registerLoadBalancerType; } })); +Object.defineProperty(exports, "selectLbConfigFromList", ({ enumerable: true, get: function () { return load_balancer_1.selectLbConfigFromList; } })); +Object.defineProperty(exports, "parseLoadBalancingConfig", ({ enumerable: true, get: function () { return load_balancer_1.parseLoadBalancingConfig; } })); +Object.defineProperty(exports, "isLoadBalancerNameRegistered", ({ enumerable: true, get: function () { return load_balancer_1.isLoadBalancerNameRegistered; } })); +var load_balancer_pick_first_1 = __nccwpck_require__(38977); +Object.defineProperty(exports, "LeafLoadBalancer", ({ enumerable: true, get: function () { return load_balancer_pick_first_1.LeafLoadBalancer; } })); +var subchannel_address_1 = __nccwpck_require__(78021); +Object.defineProperty(exports, "subchannelAddressToString", ({ enumerable: true, get: function () { return subchannel_address_1.subchannelAddressToString; } })); +Object.defineProperty(exports, "endpointToString", ({ enumerable: true, get: function () { return subchannel_address_1.endpointToString; } })); +Object.defineProperty(exports, "endpointHasAddress", ({ enumerable: true, get: function () { return subchannel_address_1.endpointHasAddress; } })); +Object.defineProperty(exports, "EndpointMap", ({ enumerable: true, get: function () { return subchannel_address_1.EndpointMap; } })); +var load_balancer_child_handler_1 = __nccwpck_require__(17559); +Object.defineProperty(exports, "ChildLoadBalancerHandler", ({ enumerable: true, get: function () { return load_balancer_child_handler_1.ChildLoadBalancerHandler; } })); +var picker_1 = __nccwpck_require__(81611); +Object.defineProperty(exports, "UnavailablePicker", ({ enumerable: true, get: function () { return picker_1.UnavailablePicker; } })); +Object.defineProperty(exports, "QueuePicker", ({ enumerable: true, get: function () { return picker_1.QueuePicker; } })); +Object.defineProperty(exports, "PickResultType", ({ enumerable: true, get: function () { return picker_1.PickResultType; } })); +var filter_1 = __nccwpck_require__(43392); +Object.defineProperty(exports, "BaseFilter", ({ enumerable: true, get: function () { return filter_1.BaseFilter; } })); 
+var filter_stack_1 = __nccwpck_require__(66450); +Object.defineProperty(exports, "FilterStackFactory", ({ enumerable: true, get: function () { return filter_stack_1.FilterStackFactory; } })); +var admin_1 = __nccwpck_require__(8258); +Object.defineProperty(exports, "registerAdminService", ({ enumerable: true, get: function () { return admin_1.registerAdminService; } })); +var subchannel_interface_1 = __nccwpck_require__(12258); +Object.defineProperty(exports, "BaseSubchannelWrapper", ({ enumerable: true, get: function () { return subchannel_interface_1.BaseSubchannelWrapper; } })); +var server_credentials_1 = __nccwpck_require__(63828); +Object.defineProperty(exports, "createServerCredentialsWithInterceptors", ({ enumerable: true, get: function () { return server_credentials_1.createServerCredentialsWithInterceptors; } })); +Object.defineProperty(exports, "createCertificateProviderServerCredentials", ({ enumerable: true, get: function () { return server_credentials_1.createCertificateProviderServerCredentials; } })); +var certificate_provider_1 = __nccwpck_require__(25649); +Object.defineProperty(exports, "FileWatcherCertificateProvider", ({ enumerable: true, get: function () { return certificate_provider_1.FileWatcherCertificateProvider; } })); +var channel_credentials_1 = __nccwpck_require__(44030); +Object.defineProperty(exports, "createCertificateProviderChannelCredentials", ({ enumerable: true, get: function () { return channel_credentials_1.createCertificateProviderChannelCredentials; } })); +//# sourceMappingURL=experimental.js.map + +/***/ }), + +/***/ 66450: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FilterStackFactory = exports.FilterStack = void 0; +class FilterStack { + constructor(filters) { + this.filters = filters; + } + sendMetadata(metadata) { + let result = metadata; + for (let i = 0; i < this.filters.length; i++) { + result = this.filters[i].sendMetadata(result); + } + return result; + } + receiveMetadata(metadata) { + let result = metadata; + for (let i = this.filters.length - 1; i >= 0; i--) { + result = this.filters[i].receiveMetadata(result); + } + return result; + } + sendMessage(message) { + let result = message; + for (let i = 0; i < this.filters.length; i++) { + result = this.filters[i].sendMessage(result); + } + return result; + } + receiveMessage(message) { + let result = message; + for (let i = this.filters.length - 1; i >= 0; i--) { + result = this.filters[i].receiveMessage(result); + } + return result; + } + receiveTrailers(status) { + let result = status; + for (let i = this.filters.length - 1; i >= 0; i--) { + result = this.filters[i].receiveTrailers(result); + } + return result; + } + push(filters) { + this.filters.unshift(...filters); + } + getFilters() { + return this.filters; + } +} +exports.FilterStack = FilterStack; +class FilterStackFactory { + constructor(factories) { + this.factories = factories; + } + push(filterFactories) { + this.factories.unshift(...filterFactories); + } + clone() { + return new FilterStackFactory([...this.factories]); + } + createFilter() { + return new FilterStack(this.factories.map(factory => factory.createFilter())); + } +} +exports.FilterStackFactory = FilterStackFactory; +//# sourceMappingURL=filter-stack.js.map + +/***/ }), + +/***/ 43392: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseFilter = void 0; +class BaseFilter { + async sendMetadata(metadata) { + return metadata; + } + receiveMetadata(metadata) { + return metadata; + } + async sendMessage(message) { + return message; + } + async receiveMessage(message) { + return message; + } + receiveTrailers(status) { + return status; + } +} +exports.BaseFilter = BaseFilter; +//# sourceMappingURL=filter.js.map + +/***/ }), + +/***/ 24000: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getProxiedConnection = exports.mapProxyName = void 0; +const logging_1 = __nccwpck_require__(35993); +const constants_1 = __nccwpck_require__(90634); +const resolver_1 = __nccwpck_require__(31594); +const http = __nccwpck_require__(13685); +const tls = __nccwpck_require__(24404); +const logging = __nccwpck_require__(35993); +const subchannel_address_1 = __nccwpck_require__(78021); +const uri_parser_1 = __nccwpck_require__(65974); +const url_1 = __nccwpck_require__(57310); +const resolver_dns_1 = __nccwpck_require__(49421); +const TRACER_NAME = 'proxy'; +function trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, text); +} +function getProxyInfo() { + let proxyEnv = ''; + let envVar = ''; + /* Prefer using 'grpc_proxy'. Fallback on 'http_proxy' if it is not set. + * Also prefer using 'https_proxy' with fallback on 'http_proxy'. The + * fallback behavior can be removed if there's a demand for it. + */ + if (process.env.grpc_proxy) { + envVar = 'grpc_proxy'; + proxyEnv = process.env.grpc_proxy; + } + else if (process.env.https_proxy) { + envVar = 'https_proxy'; + proxyEnv = process.env.https_proxy; + } + else if (process.env.http_proxy) { + envVar = 'http_proxy'; + proxyEnv = process.env.http_proxy; + } + else { + return {}; + } + let proxyUrl; + try { + proxyUrl = new url_1.URL(proxyEnv); + } + catch (e) { + (0, logging_1.log)(constants_1.LogVerbosity.ERROR, `cannot parse value of "${envVar}" env var`); + return {}; + } + if (proxyUrl.protocol !== 'http:') { + (0, logging_1.log)(constants_1.LogVerbosity.ERROR, `"${proxyUrl.protocol}" scheme not supported in proxy URI`); + return {}; + } + let userCred = null; + if (proxyUrl.username) { + if (proxyUrl.password) { + (0, logging_1.log)(constants_1.LogVerbosity.INFO, 'userinfo found in proxy URI'); + userCred = decodeURIComponent(`${proxyUrl.username}:${proxyUrl.password}`); + } + else { + userCred = proxyUrl.username; + } + } + const hostname = proxyUrl.hostname; + let port = proxyUrl.port; + /* The proxy URL uses the scheme "http:", which has a default port number of + * 80. We need to set that explicitly here if it is omitted because otherwise + * it will use gRPC's default port 443. */ + if (port === '') { + port = '80'; + } + const result = { + address: `${hostname}:${port}`, + }; + if (userCred) { + result.creds = userCred; + } + trace('Proxy server ' + result.address + ' set by environment variable ' + envVar); + return result; +} +function getNoProxyHostList() { + /* Prefer using 'no_grpc_proxy'. Fallback on 'no_proxy' if it is not set. */ + let noProxyStr = process.env.no_grpc_proxy; + let envVar = 'no_grpc_proxy'; + if (!noProxyStr) { + noProxyStr = process.env.no_proxy; + envVar = 'no_proxy'; + } + if (noProxyStr) { + trace('No proxy server list set by environment variable ' + envVar); + return noProxyStr.split(','); + } + else { + return []; + } +} +function mapProxyName(target, options) { + var _a; + const noProxyResult = { + target: target, + extraOptions: {}, + }; + if (((_a = options['grpc.enable_http_proxy']) !== null && _a !== void 0 ? 
_a : 1) === 0) { + return noProxyResult; + } + if (target.scheme === 'unix') { + return noProxyResult; + } + const proxyInfo = getProxyInfo(); + if (!proxyInfo.address) { + return noProxyResult; + } + const hostPort = (0, uri_parser_1.splitHostPort)(target.path); + if (!hostPort) { + return noProxyResult; + } + const serverHost = hostPort.host; + for (const host of getNoProxyHostList()) { + if (host === serverHost) { + trace('Not using proxy for target in no_proxy list: ' + (0, uri_parser_1.uriToString)(target)); + return noProxyResult; + } + } + const extraOptions = { + 'grpc.http_connect_target': (0, uri_parser_1.uriToString)(target), + }; + if (proxyInfo.creds) { + extraOptions['grpc.http_connect_creds'] = proxyInfo.creds; + } + return { + target: { + scheme: 'dns', + path: proxyInfo.address, + }, + extraOptions: extraOptions, + }; +} +exports.mapProxyName = mapProxyName; +function getProxiedConnection(address, channelOptions, connectionOptions) { + var _a; + if (!('grpc.http_connect_target' in channelOptions)) { + return Promise.resolve({}); + } + const realTarget = channelOptions['grpc.http_connect_target']; + const parsedTarget = (0, uri_parser_1.parseUri)(realTarget); + if (parsedTarget === null) { + return Promise.resolve({}); + } + const splitHostPost = (0, uri_parser_1.splitHostPort)(parsedTarget.path); + if (splitHostPost === null) { + return Promise.resolve({}); + } + const hostPort = `${splitHostPost.host}:${(_a = splitHostPost.port) !== null && _a !== void 0 ? _a : resolver_dns_1.DEFAULT_PORT}`; + const options = { + method: 'CONNECT', + path: hostPort, + }; + const headers = { + Host: hostPort, + }; + // Connect to the subchannel address as a proxy + if ((0, subchannel_address_1.isTcpSubchannelAddress)(address)) { + options.host = address.host; + options.port = address.port; + } + else { + options.socketPath = address.path; + } + if ('grpc.http_connect_creds' in channelOptions) { + headers['Proxy-Authorization'] = + 'Basic ' + + Buffer.from(channelOptions['grpc.http_connect_creds']).toString('base64'); + } + options.headers = headers; + const proxyAddressString = (0, subchannel_address_1.subchannelAddressToString)(address); + trace('Using proxy ' + proxyAddressString + ' to connect to ' + options.path); + return new Promise((resolve, reject) => { + const request = http.request(options); + request.once('connect', (res, socket, head) => { + var _a; + request.removeAllListeners(); + socket.removeAllListeners(); + if (res.statusCode === 200) { + trace('Successfully connected to ' + + options.path + + ' through proxy ' + + proxyAddressString); + // The HTTP client may have already read a few bytes of the proxied + // connection. If that's the case, put them back into the socket. + // See https://github.com/grpc/grpc-node/issues/2744. + if (head.length > 0) { + socket.unshift(head); + } + if ('secureContext' in connectionOptions) { + /* The proxy is connecting to a TLS server, so upgrade this socket + * connection to a TLS connection. + * This is a workaround for https://github.com/nodejs/node/issues/32922 + * See https://github.com/grpc/grpc-node/pull/1369 for more info. */ + const targetPath = (0, resolver_1.getDefaultAuthority)(parsedTarget); + const hostPort = (0, uri_parser_1.splitHostPort)(targetPath); + const remoteHost = (_a = hostPort === null || hostPort === void 0 ? void 0 : hostPort.host) !== null && _a !== void 0 ? 
_a : targetPath; + const cts = tls.connect(Object.assign({ host: remoteHost, servername: remoteHost, socket: socket }, connectionOptions), () => { + trace('Successfully established a TLS connection to ' + + options.path + + ' through proxy ' + + proxyAddressString); + resolve({ socket: cts, realTarget: parsedTarget }); + }); + cts.on('error', (error) => { + trace('Failed to establish a TLS connection to ' + + options.path + + ' through proxy ' + + proxyAddressString + + ' with error ' + + error.message); + reject(); + }); + } + else { + trace('Successfully established a plaintext connection to ' + + options.path + + ' through proxy ' + + proxyAddressString); + resolve({ + socket, + realTarget: parsedTarget, + }); + } + } + else { + (0, logging_1.log)(constants_1.LogVerbosity.ERROR, 'Failed to connect to ' + + options.path + + ' through proxy ' + + proxyAddressString + + ' with status ' + + res.statusCode); + reject(); + } + }); + request.once('error', err => { + request.removeAllListeners(); + (0, logging_1.log)(constants_1.LogVerbosity.ERROR, 'Failed to connect to proxy ' + + proxyAddressString + + ' with error ' + + err.message); + reject(); + }); + request.end(); + }); +} +exports.getProxiedConnection = getProxiedConnection; +//# sourceMappingURL=http_proxy.js.map + +/***/ }), + +/***/ 7025: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.experimental = exports.ServerInterceptingCall = exports.ResponderBuilder = exports.ServerListenerBuilder = exports.addAdminServicesToServer = exports.getChannelzHandlers = exports.getChannelzServiceDefinition = exports.InterceptorConfigurationError = exports.InterceptingCall = exports.RequesterBuilder = exports.ListenerBuilder = exports.StatusBuilder = exports.getClientChannel = exports.ServerCredentials = exports.Server = exports.setLogVerbosity = exports.setLogger = exports.load = exports.loadObject = exports.CallCredentials = exports.ChannelCredentials = exports.waitForClientReady = exports.closeClient = exports.Channel = exports.makeGenericClientConstructor = exports.makeClientConstructor = exports.loadPackageDefinition = exports.Client = exports.compressionAlgorithms = exports.propagate = exports.connectivityState = exports.status = exports.logVerbosity = exports.Metadata = exports.credentials = void 0; +const call_credentials_1 = __nccwpck_require__(21426); +Object.defineProperty(exports, "CallCredentials", ({ enumerable: true, get: function () { return call_credentials_1.CallCredentials; } })); +const channel_1 = __nccwpck_require__(13860); +Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_1.ChannelImplementation; } })); +const compression_algorithms_1 = __nccwpck_require__(54789); +Object.defineProperty(exports, "compressionAlgorithms", ({ enumerable: true, get: function () { return compression_algorithms_1.CompressionAlgorithms; } })); +const connectivity_state_1 = __nccwpck_require__(80878); +Object.defineProperty(exports, "connectivityState", ({ enumerable: true, get: function () { return connectivity_state_1.ConnectivityState; } })); +const channel_credentials_1 = __nccwpck_require__(44030); +Object.defineProperty(exports, "ChannelCredentials", ({ enumerable: true, get: function () { return channel_credentials_1.ChannelCredentials; } })); +const client_1 = __nccwpck_require__(87172); +Object.defineProperty(exports, "Client", ({ enumerable: true, get: function () { return client_1.Client; } })); +const constants_1 = __nccwpck_require__(90634); +Object.defineProperty(exports, "logVerbosity", ({ enumerable: true, get: function () { return constants_1.LogVerbosity; } })); +Object.defineProperty(exports, "status", ({ enumerable: true, get: function () { return constants_1.Status; } })); +Object.defineProperty(exports, "propagate", ({ enumerable: true, get: function () { return constants_1.Propagate; } })); +const logging = __nccwpck_require__(35993); +const make_client_1 = __nccwpck_require__(38541); +Object.defineProperty(exports, "loadPackageDefinition", ({ enumerable: true, get: function () { return make_client_1.loadPackageDefinition; } })); +Object.defineProperty(exports, "makeClientConstructor", ({ enumerable: true, get: function () { return make_client_1.makeClientConstructor; } })); +Object.defineProperty(exports, "makeGenericClientConstructor", ({ enumerable: true, get: function () { return make_client_1.makeClientConstructor; } })); +const metadata_1 = __nccwpck_require__(83665); +Object.defineProperty(exports, "Metadata", ({ enumerable: true, get: function () { return metadata_1.Metadata; } })); +const server_1 = __nccwpck_require__(33389); +Object.defineProperty(exports, "Server", ({ enumerable: true, get: function () { return server_1.Server; } })); +const server_credentials_1 = __nccwpck_require__(63828); +Object.defineProperty(exports, 
"ServerCredentials", ({ enumerable: true, get: function () { return server_credentials_1.ServerCredentials; } })); +const status_builder_1 = __nccwpck_require__(73155); +Object.defineProperty(exports, "StatusBuilder", ({ enumerable: true, get: function () { return status_builder_1.StatusBuilder; } })); +/**** Client Credentials ****/ +// Using assign only copies enumerable properties, which is what we want +exports.credentials = { + /** + * Combine a ChannelCredentials with any number of CallCredentials into a + * single ChannelCredentials object. + * @param channelCredentials The ChannelCredentials object. + * @param callCredentials Any number of CallCredentials objects. + * @return The resulting ChannelCredentials object. + */ + combineChannelCredentials: (channelCredentials, ...callCredentials) => { + return callCredentials.reduce((acc, other) => acc.compose(other), channelCredentials); + }, + /** + * Combine any number of CallCredentials into a single CallCredentials + * object. + * @param first The first CallCredentials object. + * @param additional Any number of additional CallCredentials objects. + * @return The resulting CallCredentials object. + */ + combineCallCredentials: (first, ...additional) => { + return additional.reduce((acc, other) => acc.compose(other), first); + }, + // from channel-credentials.ts + createInsecure: channel_credentials_1.ChannelCredentials.createInsecure, + createSsl: channel_credentials_1.ChannelCredentials.createSsl, + createFromSecureContext: channel_credentials_1.ChannelCredentials.createFromSecureContext, + // from call-credentials.ts + createFromMetadataGenerator: call_credentials_1.CallCredentials.createFromMetadataGenerator, + createFromGoogleCredential: call_credentials_1.CallCredentials.createFromGoogleCredential, + createEmpty: call_credentials_1.CallCredentials.createEmpty, +}; +/** + * Close a Client object. + * @param client The client to close. + */ +const closeClient = (client) => client.close(); +exports.closeClient = closeClient; +const waitForClientReady = (client, deadline, callback) => client.waitForReady(deadline, callback); +exports.waitForClientReady = waitForClientReady; +/* eslint-enable @typescript-eslint/no-explicit-any */ +/**** Unimplemented function stubs ****/ +/* eslint-disable @typescript-eslint/no-explicit-any */ +const loadObject = (value, options) => { + throw new Error('Not available in this library. Use @grpc/proto-loader and loadPackageDefinition instead'); +}; +exports.loadObject = loadObject; +const load = (filename, format, options) => { + throw new Error('Not available in this library. 
Use @grpc/proto-loader and loadPackageDefinition instead'); +}; +exports.load = load; +const setLogger = (logger) => { + logging.setLogger(logger); +}; +exports.setLogger = setLogger; +const setLogVerbosity = (verbosity) => { + logging.setLoggerVerbosity(verbosity); +}; +exports.setLogVerbosity = setLogVerbosity; +const getClientChannel = (client) => { + return client_1.Client.prototype.getChannel.call(client); +}; +exports.getClientChannel = getClientChannel; +var client_interceptors_1 = __nccwpck_require__(26597); +Object.defineProperty(exports, "ListenerBuilder", ({ enumerable: true, get: function () { return client_interceptors_1.ListenerBuilder; } })); +Object.defineProperty(exports, "RequesterBuilder", ({ enumerable: true, get: function () { return client_interceptors_1.RequesterBuilder; } })); +Object.defineProperty(exports, "InterceptingCall", ({ enumerable: true, get: function () { return client_interceptors_1.InterceptingCall; } })); +Object.defineProperty(exports, "InterceptorConfigurationError", ({ enumerable: true, get: function () { return client_interceptors_1.InterceptorConfigurationError; } })); +var channelz_1 = __nccwpck_require__(79975); +Object.defineProperty(exports, "getChannelzServiceDefinition", ({ enumerable: true, get: function () { return channelz_1.getChannelzServiceDefinition; } })); +Object.defineProperty(exports, "getChannelzHandlers", ({ enumerable: true, get: function () { return channelz_1.getChannelzHandlers; } })); +var admin_1 = __nccwpck_require__(8258); +Object.defineProperty(exports, "addAdminServicesToServer", ({ enumerable: true, get: function () { return admin_1.addAdminServicesToServer; } })); +var server_interceptors_1 = __nccwpck_require__(20998); +Object.defineProperty(exports, "ServerListenerBuilder", ({ enumerable: true, get: function () { return server_interceptors_1.ServerListenerBuilder; } })); +Object.defineProperty(exports, "ResponderBuilder", ({ enumerable: true, get: function () { return server_interceptors_1.ResponderBuilder; } })); +Object.defineProperty(exports, "ServerInterceptingCall", ({ enumerable: true, get: function () { return server_interceptors_1.ServerInterceptingCall; } })); +const experimental = __nccwpck_require__(37626); +exports.experimental = experimental; +const resolver_dns = __nccwpck_require__(49421); +const resolver_uds = __nccwpck_require__(5252); +const resolver_ip = __nccwpck_require__(97902); +const load_balancer_pick_first = __nccwpck_require__(38977); +const load_balancer_round_robin = __nccwpck_require__(92787); +const load_balancer_outlier_detection = __nccwpck_require__(76828); +const channelz = __nccwpck_require__(79975); +(() => { + resolver_dns.setup(); + resolver_uds.setup(); + resolver_ip.setup(); + load_balancer_pick_first.setup(); + load_balancer_round_robin.setup(); + load_balancer_outlier_detection.setup(); + channelz.setup(); +})(); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 69672: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.InternalChannel = void 0; +const channel_credentials_1 = __nccwpck_require__(44030); +const resolving_load_balancer_1 = __nccwpck_require__(19192); +const subchannel_pool_1 = __nccwpck_require__(39780); +const picker_1 = __nccwpck_require__(81611); +const metadata_1 = __nccwpck_require__(83665); +const constants_1 = __nccwpck_require__(90634); +const filter_stack_1 = __nccwpck_require__(66450); +const compression_filter_1 = __nccwpck_require__(47616); +const resolver_1 = __nccwpck_require__(31594); +const logging_1 = __nccwpck_require__(35993); +const http_proxy_1 = __nccwpck_require__(24000); +const uri_parser_1 = __nccwpck_require__(65974); +const connectivity_state_1 = __nccwpck_require__(80878); +const channelz_1 = __nccwpck_require__(79975); +const load_balancing_call_1 = __nccwpck_require__(776); +const deadline_1 = __nccwpck_require__(511); +const resolving_call_1 = __nccwpck_require__(39909); +const call_number_1 = __nccwpck_require__(70380); +const control_plane_status_1 = __nccwpck_require__(39129); +const retrying_call_1 = __nccwpck_require__(48159); +const subchannel_interface_1 = __nccwpck_require__(12258); +/** + * See https://nodejs.org/api/timers.html#timers_setinterval_callback_delay_args + */ +const MAX_TIMEOUT_TIME = 2147483647; +const MIN_IDLE_TIMEOUT_MS = 1000; +// 30 minutes +const DEFAULT_IDLE_TIMEOUT_MS = 30 * 60 * 1000; +const RETRY_THROTTLER_MAP = new Map(); +const DEFAULT_RETRY_BUFFER_SIZE_BYTES = 1 << 24; // 16 MB +const DEFAULT_PER_RPC_RETRY_BUFFER_SIZE_BYTES = 1 << 20; // 1 MB +class ChannelSubchannelWrapper extends subchannel_interface_1.BaseSubchannelWrapper { + constructor(childSubchannel, channel) { + super(childSubchannel); + this.channel = channel; + this.refCount = 0; + this.subchannelStateListener = (subchannel, previousState, newState, keepaliveTime) => { + channel.throttleKeepalive(keepaliveTime); + }; + childSubchannel.addConnectivityStateListener(this.subchannelStateListener); + } + ref() { + this.child.ref(); + this.refCount += 1; + } + unref() { + this.child.unref(); + this.refCount -= 1; + if (this.refCount <= 0) { + this.child.removeConnectivityStateListener(this.subchannelStateListener); + this.channel.removeWrappedSubchannel(this); + } + } +} +class ShutdownPicker { + pick(pickArgs) { + return { + pickResultType: picker_1.PickResultType.DROP, + status: { + code: constants_1.Status.UNAVAILABLE, + details: 'Channel closed before call started', + metadata: new metadata_1.Metadata() + }, + subchannel: null, + onCallStarted: null, + onCallEnded: null + }; + } +} +class InternalChannel { + constructor(target, credentials, options) { + var _a, _b, _c, _d, _e, _f, _g, _h; + this.credentials = credentials; + this.options = options; + this.connectivityState = connectivity_state_1.ConnectivityState.IDLE; + this.currentPicker = new picker_1.UnavailablePicker(); + /** + * Calls queued up to get a call config. Should only be populated before the + * first time the resolver returns a result, which includes the ConfigSelector. + */ + this.configSelectionQueue = []; + this.pickQueue = []; + this.connectivityStateWatchers = []; + this.configSelector = null; + /** + * This is the error from the name resolver if it failed most recently. 
It + * is only used to end calls that start while there is no config selector + * and the name resolver is in backoff, so it should be nulled if + * configSelector becomes set or the channel state becomes anything other + * than TRANSIENT_FAILURE. + */ + this.currentResolutionError = null; + this.wrappedSubchannels = new Set(); + this.callCount = 0; + this.idleTimer = null; + // Channelz info + this.channelzEnabled = true; + this.callTracker = new channelz_1.ChannelzCallTracker(); + this.childrenTracker = new channelz_1.ChannelzChildrenTracker(); + /** + * Randomly generated ID to be passed to the config selector, for use by + * ring_hash in xDS. An integer distributed approximately uniformly between + * 0 and MAX_SAFE_INTEGER. + */ + this.randomChannelId = Math.floor(Math.random() * Number.MAX_SAFE_INTEGER); + if (typeof target !== 'string') { + throw new TypeError('Channel target must be a string'); + } + if (!(credentials instanceof channel_credentials_1.ChannelCredentials)) { + throw new TypeError('Channel credentials must be a ChannelCredentials object'); + } + if (options) { + if (typeof options !== 'object') { + throw new TypeError('Channel options must be an object'); + } + } + this.originalTarget = target; + const originalTargetUri = (0, uri_parser_1.parseUri)(target); + if (originalTargetUri === null) { + throw new Error(`Could not parse target name "${target}"`); + } + /* This ensures that the target has a scheme that is registered with the + * resolver */ + const defaultSchemeMapResult = (0, resolver_1.mapUriDefaultScheme)(originalTargetUri); + if (defaultSchemeMapResult === null) { + throw new Error(`Could not find a default scheme for target name "${target}"`); + } + this.callRefTimer = setInterval(() => { }, MAX_TIMEOUT_TIME); + (_b = (_a = this.callRefTimer).unref) === null || _b === void 0 ? void 0 : _b.call(_a); + if (this.options['grpc.enable_channelz'] === 0) { + this.channelzEnabled = false; + } + this.channelzTrace = new channelz_1.ChannelzTrace(); + this.channelzRef = (0, channelz_1.registerChannelzChannel)(target, () => this.getChannelzInfo(), this.channelzEnabled); + if (this.channelzEnabled) { + this.channelzTrace.addTrace('CT_INFO', 'Channel created'); + } + if (this.options['grpc.default_authority']) { + this.defaultAuthority = this.options['grpc.default_authority']; + } + else { + this.defaultAuthority = (0, resolver_1.getDefaultAuthority)(defaultSchemeMapResult); + } + const proxyMapResult = (0, http_proxy_1.mapProxyName)(defaultSchemeMapResult, options); + this.target = proxyMapResult.target; + this.options = Object.assign({}, this.options, proxyMapResult.extraOptions); + /* The global boolean parameter to getSubchannelPool has the inverse meaning to what + * the grpc.use_local_subchannel_pool channel option means. */ + this.subchannelPool = (0, subchannel_pool_1.getSubchannelPool)(((_c = options['grpc.use_local_subchannel_pool']) !== null && _c !== void 0 ? _c : 0) === 0); + this.retryBufferTracker = new retrying_call_1.MessageBufferTracker((_d = options['grpc.retry_buffer_size']) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_BUFFER_SIZE_BYTES, (_e = options['grpc.per_rpc_retry_buffer_size']) !== null && _e !== void 0 ? _e : DEFAULT_PER_RPC_RETRY_BUFFER_SIZE_BYTES); + this.keepaliveTime = (_f = options['grpc.keepalive_time_ms']) !== null && _f !== void 0 ? _f : -1; + this.idleTimeoutMs = Math.max((_g = options['grpc.client_idle_timeout_ms']) !== null && _g !== void 0 ? 
_g : DEFAULT_IDLE_TIMEOUT_MS, MIN_IDLE_TIMEOUT_MS); + const channelControlHelper = { + createSubchannel: (subchannelAddress, subchannelArgs, credentialsOverride) => { + const subchannel = this.subchannelPool.getOrCreateSubchannel(this.target, subchannelAddress, Object.assign({}, this.options, subchannelArgs), credentialsOverride !== null && credentialsOverride !== void 0 ? credentialsOverride : this.credentials); + subchannel.throttleKeepalive(this.keepaliveTime); + if (this.channelzEnabled) { + this.channelzTrace.addTrace('CT_INFO', 'Created subchannel or used existing subchannel', subchannel.getChannelzRef()); + } + const wrappedSubchannel = new ChannelSubchannelWrapper(subchannel, this); + this.wrappedSubchannels.add(wrappedSubchannel); + return wrappedSubchannel; + }, + updateState: (connectivityState, picker) => { + this.currentPicker = picker; + const queueCopy = this.pickQueue.slice(); + this.pickQueue = []; + if (queueCopy.length > 0) { + this.callRefTimerUnref(); + } + for (const call of queueCopy) { + call.doPick(); + } + this.updateState(connectivityState); + }, + requestReresolution: () => { + // This should never be called. + throw new Error('Resolving load balancer should never call requestReresolution'); + }, + addChannelzChild: (child) => { + if (this.channelzEnabled) { + this.childrenTracker.refChild(child); + } + }, + removeChannelzChild: (child) => { + if (this.channelzEnabled) { + this.childrenTracker.unrefChild(child); + } + }, + }; + this.resolvingLoadBalancer = new resolving_load_balancer_1.ResolvingLoadBalancer(this.target, channelControlHelper, credentials, options, (serviceConfig, configSelector) => { + if (serviceConfig.retryThrottling) { + RETRY_THROTTLER_MAP.set(this.getTarget(), new retrying_call_1.RetryThrottler(serviceConfig.retryThrottling.maxTokens, serviceConfig.retryThrottling.tokenRatio, RETRY_THROTTLER_MAP.get(this.getTarget()))); + } + else { + RETRY_THROTTLER_MAP.delete(this.getTarget()); + } + if (this.channelzEnabled) { + this.channelzTrace.addTrace('CT_INFO', 'Address resolution succeeded'); + } + this.configSelector = configSelector; + this.currentResolutionError = null; + /* We process the queue asynchronously to ensure that the corresponding + * load balancer update has completed. 
*/ + process.nextTick(() => { + const localQueue = this.configSelectionQueue; + this.configSelectionQueue = []; + if (localQueue.length > 0) { + this.callRefTimerUnref(); + } + for (const call of localQueue) { + call.getConfig(); + } + }); + }, status => { + if (this.channelzEnabled) { + this.channelzTrace.addTrace('CT_WARNING', 'Address resolution failed with code ' + + status.code + + ' and details "' + + status.details + + '"'); + } + if (this.configSelectionQueue.length > 0) { + this.trace('Name resolution failed with calls queued for config selection'); + } + if (this.configSelector === null) { + this.currentResolutionError = Object.assign(Object.assign({}, (0, control_plane_status_1.restrictControlPlaneStatusCode)(status.code, status.details)), { metadata: status.metadata }); + } + const localQueue = this.configSelectionQueue; + this.configSelectionQueue = []; + if (localQueue.length > 0) { + this.callRefTimerUnref(); + } + for (const call of localQueue) { + call.reportResolverError(status); + } + }); + this.filterStackFactory = new filter_stack_1.FilterStackFactory([ + new compression_filter_1.CompressionFilterFactory(this, this.options), + ]); + this.trace('Channel constructed with options ' + + JSON.stringify(options, undefined, 2)); + const error = new Error(); + if ((0, logging_1.isTracerEnabled)('channel_stacktrace')) { + (0, logging_1.trace)(constants_1.LogVerbosity.DEBUG, 'channel_stacktrace', '(' + + this.channelzRef.id + + ') ' + + 'Channel constructed \n' + + ((_h = error.stack) === null || _h === void 0 ? void 0 : _h.substring(error.stack.indexOf('\n') + 1))); + } + this.lastActivityTimestamp = new Date(); + } + getChannelzInfo() { + return { + target: this.originalTarget, + state: this.connectivityState, + trace: this.channelzTrace, + callTracker: this.callTracker, + children: this.childrenTracker.getChildLists(), + }; + } + trace(text, verbosityOverride) { + (0, logging_1.trace)(verbosityOverride !== null && verbosityOverride !== void 0 ? verbosityOverride : constants_1.LogVerbosity.DEBUG, 'channel', '(' + this.channelzRef.id + ') ' + (0, uri_parser_1.uriToString)(this.target) + ' ' + text); + } + callRefTimerRef() { + var _a, _b, _c, _d; + // If the hasRef function does not exist, always run the code + if (!((_b = (_a = this.callRefTimer).hasRef) === null || _b === void 0 ? void 0 : _b.call(_a))) { + this.trace('callRefTimer.ref | configSelectionQueue.length=' + + this.configSelectionQueue.length + + ' pickQueue.length=' + + this.pickQueue.length); + (_d = (_c = this.callRefTimer).ref) === null || _d === void 0 ? void 0 : _d.call(_c); + } + } + callRefTimerUnref() { + var _a, _b; + // If the hasRef function does not exist, always run the code + if (!this.callRefTimer.hasRef || this.callRefTimer.hasRef()) { + this.trace('callRefTimer.unref | configSelectionQueue.length=' + + this.configSelectionQueue.length + + ' pickQueue.length=' + + this.pickQueue.length); + (_b = (_a = this.callRefTimer).unref) === null || _b === void 0 ? 
void 0 : _b.call(_a); + } + } + removeConnectivityStateWatcher(watcherObject) { + const watcherIndex = this.connectivityStateWatchers.findIndex(value => value === watcherObject); + if (watcherIndex >= 0) { + this.connectivityStateWatchers.splice(watcherIndex, 1); + } + } + updateState(newState) { + (0, logging_1.trace)(constants_1.LogVerbosity.DEBUG, 'connectivity_state', '(' + + this.channelzRef.id + + ') ' + + (0, uri_parser_1.uriToString)(this.target) + + ' ' + + connectivity_state_1.ConnectivityState[this.connectivityState] + + ' -> ' + + connectivity_state_1.ConnectivityState[newState]); + if (this.channelzEnabled) { + this.channelzTrace.addTrace('CT_INFO', 'Connectivity state change to ' + connectivity_state_1.ConnectivityState[newState]); + } + this.connectivityState = newState; + const watchersCopy = this.connectivityStateWatchers.slice(); + for (const watcherObject of watchersCopy) { + if (newState !== watcherObject.currentState) { + if (watcherObject.timer) { + clearTimeout(watcherObject.timer); + } + this.removeConnectivityStateWatcher(watcherObject); + watcherObject.callback(); + } + } + if (newState !== connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE) { + this.currentResolutionError = null; + } + } + throttleKeepalive(newKeepaliveTime) { + if (newKeepaliveTime > this.keepaliveTime) { + this.keepaliveTime = newKeepaliveTime; + for (const wrappedSubchannel of this.wrappedSubchannels) { + wrappedSubchannel.throttleKeepalive(newKeepaliveTime); + } + } + } + removeWrappedSubchannel(wrappedSubchannel) { + this.wrappedSubchannels.delete(wrappedSubchannel); + } + doPick(metadata, extraPickInfo) { + return this.currentPicker.pick({ + metadata: metadata, + extraPickInfo: extraPickInfo, + }); + } + queueCallForPick(call) { + this.pickQueue.push(call); + this.callRefTimerRef(); + } + getConfig(method, metadata) { + if (this.connectivityState !== connectivity_state_1.ConnectivityState.SHUTDOWN) { + this.resolvingLoadBalancer.exitIdle(); + } + if (this.configSelector) { + return { + type: 'SUCCESS', + config: this.configSelector(method, metadata, this.randomChannelId), + }; + } + else { + if (this.currentResolutionError) { + return { + type: 'ERROR', + error: this.currentResolutionError, + }; + } + else { + return { + type: 'NONE', + }; + } + } + } + queueCallForConfig(call) { + this.configSelectionQueue.push(call); + this.callRefTimerRef(); + } + enterIdle() { + this.resolvingLoadBalancer.destroy(); + this.updateState(connectivity_state_1.ConnectivityState.IDLE); + this.currentPicker = new picker_1.QueuePicker(this.resolvingLoadBalancer); + if (this.idleTimer) { + clearTimeout(this.idleTimer); + this.idleTimer = null; + } + } + startIdleTimeout(timeoutMs) { + var _a, _b; + this.idleTimer = setTimeout(() => { + if (this.callCount > 0) { + /* If there is currently a call, the channel will not go idle for a + * period of at least idleTimeoutMs, so check again after that time. + */ + this.startIdleTimeout(this.idleTimeoutMs); + return; + } + const now = new Date(); + const timeSinceLastActivity = now.valueOf() - this.lastActivityTimestamp.valueOf(); + if (timeSinceLastActivity >= this.idleTimeoutMs) { + this.trace('Idle timer triggered after ' + + this.idleTimeoutMs + + 'ms of inactivity'); + this.enterIdle(); + } + else { + /* Whenever the timer fires with the latest activity being too recent, + * set the timer again for the time when the time since the last + * activity is equal to the timeout. 
This should result in the timer + * firing no more than once every idleTimeoutMs/2 on average. */ + this.startIdleTimeout(this.idleTimeoutMs - timeSinceLastActivity); + } + }, timeoutMs); + (_b = (_a = this.idleTimer).unref) === null || _b === void 0 ? void 0 : _b.call(_a); + } + maybeStartIdleTimer() { + if (this.connectivityState !== connectivity_state_1.ConnectivityState.SHUTDOWN && + !this.idleTimer) { + this.startIdleTimeout(this.idleTimeoutMs); + } + } + onCallStart() { + if (this.channelzEnabled) { + this.callTracker.addCallStarted(); + } + this.callCount += 1; + } + onCallEnd(status) { + if (this.channelzEnabled) { + if (status.code === constants_1.Status.OK) { + this.callTracker.addCallSucceeded(); + } + else { + this.callTracker.addCallFailed(); + } + } + this.callCount -= 1; + this.lastActivityTimestamp = new Date(); + this.maybeStartIdleTimer(); + } + createLoadBalancingCall(callConfig, method, host, credentials, deadline) { + const callNumber = (0, call_number_1.getNextCallNumber)(); + this.trace('createLoadBalancingCall [' + callNumber + '] method="' + method + '"'); + return new load_balancing_call_1.LoadBalancingCall(this, callConfig, method, host, credentials, deadline, callNumber); + } + createRetryingCall(callConfig, method, host, credentials, deadline) { + const callNumber = (0, call_number_1.getNextCallNumber)(); + this.trace('createRetryingCall [' + callNumber + '] method="' + method + '"'); + return new retrying_call_1.RetryingCall(this, callConfig, method, host, credentials, deadline, callNumber, this.retryBufferTracker, RETRY_THROTTLER_MAP.get(this.getTarget())); + } + createInnerCall(callConfig, method, host, credentials, deadline) { + // Create a RetryingCall if retries are enabled + if (this.options['grpc.enable_retries'] === 0) { + return this.createLoadBalancingCall(callConfig, method, host, credentials, deadline); + } + else { + return this.createRetryingCall(callConfig, method, host, credentials, deadline); + } + } + createResolvingCall(method, deadline, host, parentCall, propagateFlags) { + const callNumber = (0, call_number_1.getNextCallNumber)(); + this.trace('createResolvingCall [' + + callNumber + + '] method="' + + method + + '", deadline=' + + (0, deadline_1.deadlineToString)(deadline)); + const finalOptions = { + deadline: deadline, + flags: propagateFlags !== null && propagateFlags !== void 0 ? propagateFlags : constants_1.Propagate.DEFAULTS, + host: host !== null && host !== void 0 ? 
host : this.defaultAuthority, + parentCall: parentCall, + }; + const call = new resolving_call_1.ResolvingCall(this, method, finalOptions, this.filterStackFactory.clone(), this.credentials._getCallCredentials(), callNumber); + this.onCallStart(); + call.addStatusWatcher(status => { + this.onCallEnd(status); + }); + return call; + } + close() { + this.resolvingLoadBalancer.destroy(); + this.updateState(connectivity_state_1.ConnectivityState.SHUTDOWN); + this.currentPicker = new ShutdownPicker(); + for (const call of this.configSelectionQueue) { + call.cancelWithStatus(constants_1.Status.UNAVAILABLE, 'Channel closed before call started'); + } + this.configSelectionQueue = []; + for (const call of this.pickQueue) { + call.cancelWithStatus(constants_1.Status.UNAVAILABLE, 'Channel closed before call started'); + } + this.pickQueue = []; + clearInterval(this.callRefTimer); + if (this.idleTimer) { + clearTimeout(this.idleTimer); + } + if (this.channelzEnabled) { + (0, channelz_1.unregisterChannelzRef)(this.channelzRef); + } + this.subchannelPool.unrefUnusedSubchannels(); + } + getTarget() { + return (0, uri_parser_1.uriToString)(this.target); + } + getConnectivityState(tryToConnect) { + const connectivityState = this.connectivityState; + if (tryToConnect) { + this.resolvingLoadBalancer.exitIdle(); + this.lastActivityTimestamp = new Date(); + this.maybeStartIdleTimer(); + } + return connectivityState; + } + watchConnectivityState(currentState, deadline, callback) { + if (this.connectivityState === connectivity_state_1.ConnectivityState.SHUTDOWN) { + throw new Error('Channel has been shut down'); + } + let timer = null; + if (deadline !== Infinity) { + const deadlineDate = deadline instanceof Date ? deadline : new Date(deadline); + const now = new Date(); + if (deadline === -Infinity || deadlineDate <= now) { + process.nextTick(callback, new Error('Deadline passed without connectivity state change')); + return; + } + timer = setTimeout(() => { + this.removeConnectivityStateWatcher(watcherObject); + callback(new Error('Deadline passed without connectivity state change')); + }, deadlineDate.getTime() - now.getTime()); + } + const watcherObject = { + currentState, + callback, + timer, + }; + this.connectivityStateWatchers.push(watcherObject); + } + /** + * Get the channelz reference object for this channel. The returned value is + * garbage if channelz is disabled for this channel. + * @returns + */ + getChannelzRef() { + return this.channelzRef; + } + createCall(method, deadline, host, parentCall, propagateFlags) { + if (typeof method !== 'string') { + throw new TypeError('Channel#createCall: method must be a string'); + } + if (!(typeof deadline === 'number' || deadline instanceof Date)) { + throw new TypeError('Channel#createCall: deadline must be a number or Date'); + } + if (this.connectivityState === connectivity_state_1.ConnectivityState.SHUTDOWN) { + throw new Error('Channel has been shut down'); + } + return this.createResolvingCall(method, deadline, host, parentCall, propagateFlags); + } + getOptions() { + return this.options; + } +} +exports.InternalChannel = InternalChannel; +//# sourceMappingURL=internal-channel.js.map + +/***/ }), + +/***/ 17559: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2020 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ChildLoadBalancerHandler = void 0; +const load_balancer_1 = __nccwpck_require__(52680); +const connectivity_state_1 = __nccwpck_require__(80878); +const TYPE_NAME = 'child_load_balancer_helper'; +class ChildLoadBalancerHandler { + constructor(channelControlHelper, credentials, options) { + this.channelControlHelper = channelControlHelper; + this.credentials = credentials; + this.options = options; + this.currentChild = null; + this.pendingChild = null; + this.latestConfig = null; + this.ChildPolicyHelper = class { + constructor(parent) { + this.parent = parent; + this.child = null; + } + createSubchannel(subchannelAddress, subchannelArgs, credentialsOverride) { + return this.parent.channelControlHelper.createSubchannel(subchannelAddress, subchannelArgs, credentialsOverride); + } + updateState(connectivityState, picker) { + var _a; + if (this.calledByPendingChild()) { + if (connectivityState === connectivity_state_1.ConnectivityState.CONNECTING) { + return; + } + (_a = this.parent.currentChild) === null || _a === void 0 ? void 0 : _a.destroy(); + this.parent.currentChild = this.parent.pendingChild; + this.parent.pendingChild = null; + } + else if (!this.calledByCurrentChild()) { + return; + } + this.parent.channelControlHelper.updateState(connectivityState, picker); + } + requestReresolution() { + var _a; + const latestChild = (_a = this.parent.pendingChild) !== null && _a !== void 0 ? 
_a : this.parent.currentChild; + if (this.child === latestChild) { + this.parent.channelControlHelper.requestReresolution(); + } + } + setChild(newChild) { + this.child = newChild; + } + addChannelzChild(child) { + this.parent.channelControlHelper.addChannelzChild(child); + } + removeChannelzChild(child) { + this.parent.channelControlHelper.removeChannelzChild(child); + } + calledByPendingChild() { + return this.child === this.parent.pendingChild; + } + calledByCurrentChild() { + return this.child === this.parent.currentChild; + } + }; + } + configUpdateRequiresNewPolicyInstance(oldConfig, newConfig) { + return oldConfig.getLoadBalancerName() !== newConfig.getLoadBalancerName(); + } + /** + * Prerequisites: lbConfig !== null and lbConfig.name is registered + * @param endpointList + * @param lbConfig + * @param attributes + */ + updateAddressList(endpointList, lbConfig, attributes) { + let childToUpdate; + if (this.currentChild === null || + this.latestConfig === null || + this.configUpdateRequiresNewPolicyInstance(this.latestConfig, lbConfig)) { + const newHelper = new this.ChildPolicyHelper(this); + const newChild = (0, load_balancer_1.createLoadBalancer)(lbConfig, newHelper, this.credentials, this.options); + newHelper.setChild(newChild); + if (this.currentChild === null) { + this.currentChild = newChild; + childToUpdate = this.currentChild; + } + else { + if (this.pendingChild) { + this.pendingChild.destroy(); + } + this.pendingChild = newChild; + childToUpdate = this.pendingChild; + } + } + else { + if (this.pendingChild === null) { + childToUpdate = this.currentChild; + } + else { + childToUpdate = this.pendingChild; + } + } + this.latestConfig = lbConfig; + childToUpdate.updateAddressList(endpointList, lbConfig, attributes); + } + exitIdle() { + if (this.currentChild) { + this.currentChild.exitIdle(); + if (this.pendingChild) { + this.pendingChild.exitIdle(); + } + } + } + resetBackoff() { + if (this.currentChild) { + this.currentChild.resetBackoff(); + if (this.pendingChild) { + this.pendingChild.resetBackoff(); + } + } + } + destroy() { + /* Note: state updates are only propagated from the child balancer if that + * object is equal to this.currentChild or this.pendingChild. Since this + * function sets both of those to null, no further state updates will + * occur after this function returns. */ + if (this.currentChild) { + this.currentChild.destroy(); + this.currentChild = null; + } + if (this.pendingChild) { + this.pendingChild.destroy(); + this.pendingChild = null; + } + } + getTypeName() { + return TYPE_NAME; + } +} +exports.ChildLoadBalancerHandler = ChildLoadBalancerHandler; +//# sourceMappingURL=load-balancer-child-handler.js.map + +/***/ }), + +/***/ 76828: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setup = exports.OutlierDetectionLoadBalancer = exports.OutlierDetectionLoadBalancingConfig = void 0; +const connectivity_state_1 = __nccwpck_require__(80878); +const constants_1 = __nccwpck_require__(90634); +const duration_1 = __nccwpck_require__(62668); +const experimental_1 = __nccwpck_require__(37626); +const load_balancer_1 = __nccwpck_require__(52680); +const load_balancer_child_handler_1 = __nccwpck_require__(17559); +const picker_1 = __nccwpck_require__(81611); +const subchannel_address_1 = __nccwpck_require__(78021); +const subchannel_interface_1 = __nccwpck_require__(12258); +const logging = __nccwpck_require__(35993); +const TRACER_NAME = 'outlier_detection'; +function trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, text); +} +const TYPE_NAME = 'outlier_detection'; +const OUTLIER_DETECTION_ENABLED = ((_a = process.env.GRPC_EXPERIMENTAL_ENABLE_OUTLIER_DETECTION) !== null && _a !== void 0 ? _a : 'true') === 'true'; +const defaultSuccessRateEjectionConfig = { + stdev_factor: 1900, + enforcement_percentage: 100, + minimum_hosts: 5, + request_volume: 100, +}; +const defaultFailurePercentageEjectionConfig = { + threshold: 85, + enforcement_percentage: 100, + minimum_hosts: 5, + request_volume: 50, +}; +function validateFieldType(obj, fieldName, expectedType, objectName) { + if (fieldName in obj && + obj[fieldName] !== undefined && + typeof obj[fieldName] !== expectedType) { + const fullFieldName = objectName ? `${objectName}.${fieldName}` : fieldName; + throw new Error(`outlier detection config ${fullFieldName} parse error: expected ${expectedType}, got ${typeof obj[fieldName]}`); + } +} +function validatePositiveDuration(obj, fieldName, objectName) { + const fullFieldName = objectName ? `${objectName}.${fieldName}` : fieldName; + if (fieldName in obj && obj[fieldName] !== undefined) { + if (!(0, duration_1.isDuration)(obj[fieldName])) { + throw new Error(`outlier detection config ${fullFieldName} parse error: expected Duration, got ${typeof obj[fieldName]}`); + } + if (!(obj[fieldName].seconds >= 0 && + obj[fieldName].seconds <= 315576000000 && + obj[fieldName].nanos >= 0 && + obj[fieldName].nanos <= 999999999)) { + throw new Error(`outlier detection config ${fullFieldName} parse error: values out of range for non-negative Duaration`); + } + } +} +function validatePercentage(obj, fieldName, objectName) { + const fullFieldName = objectName ? `${objectName}.${fieldName}` : fieldName; + validateFieldType(obj, fieldName, 'number', objectName); + if (fieldName in obj && + obj[fieldName] !== undefined && + !(obj[fieldName] >= 0 && obj[fieldName] <= 100)) { + throw new Error(`outlier detection config ${fullFieldName} parse error: value out of range for percentage (0-100)`); + } +} +class OutlierDetectionLoadBalancingConfig { + constructor(intervalMs, baseEjectionTimeMs, maxEjectionTimeMs, maxEjectionPercent, successRateEjection, failurePercentageEjection, childPolicy) { + this.childPolicy = childPolicy; + if (childPolicy.getLoadBalancerName() === 'pick_first') { + throw new Error('outlier_detection LB policy cannot have a pick_first child policy'); + } + this.intervalMs = intervalMs !== null && intervalMs !== void 0 ? intervalMs : 10000; + this.baseEjectionTimeMs = baseEjectionTimeMs !== null && baseEjectionTimeMs !== void 0 ? baseEjectionTimeMs : 30000; + this.maxEjectionTimeMs = maxEjectionTimeMs !== null && maxEjectionTimeMs !== void 0 ? 
maxEjectionTimeMs : 300000; + this.maxEjectionPercent = maxEjectionPercent !== null && maxEjectionPercent !== void 0 ? maxEjectionPercent : 10; + this.successRateEjection = successRateEjection + ? Object.assign(Object.assign({}, defaultSuccessRateEjectionConfig), successRateEjection) : null; + this.failurePercentageEjection = failurePercentageEjection + ? Object.assign(Object.assign({}, defaultFailurePercentageEjectionConfig), failurePercentageEjection) : null; + } + getLoadBalancerName() { + return TYPE_NAME; + } + toJsonObject() { + var _a, _b; + return { + outlier_detection: { + interval: (0, duration_1.msToDuration)(this.intervalMs), + base_ejection_time: (0, duration_1.msToDuration)(this.baseEjectionTimeMs), + max_ejection_time: (0, duration_1.msToDuration)(this.maxEjectionTimeMs), + max_ejection_percent: this.maxEjectionPercent, + success_rate_ejection: (_a = this.successRateEjection) !== null && _a !== void 0 ? _a : undefined, + failure_percentage_ejection: (_b = this.failurePercentageEjection) !== null && _b !== void 0 ? _b : undefined, + child_policy: [this.childPolicy.toJsonObject()], + }, + }; + } + getIntervalMs() { + return this.intervalMs; + } + getBaseEjectionTimeMs() { + return this.baseEjectionTimeMs; + } + getMaxEjectionTimeMs() { + return this.maxEjectionTimeMs; + } + getMaxEjectionPercent() { + return this.maxEjectionPercent; + } + getSuccessRateEjectionConfig() { + return this.successRateEjection; + } + getFailurePercentageEjectionConfig() { + return this.failurePercentageEjection; + } + getChildPolicy() { + return this.childPolicy; + } + static createFromJson(obj) { + var _a; + validatePositiveDuration(obj, 'interval'); + validatePositiveDuration(obj, 'base_ejection_time'); + validatePositiveDuration(obj, 'max_ejection_time'); + validatePercentage(obj, 'max_ejection_percent'); + if ('success_rate_ejection' in obj && + obj.success_rate_ejection !== undefined) { + if (typeof obj.success_rate_ejection !== 'object') { + throw new Error('outlier detection config success_rate_ejection must be an object'); + } + validateFieldType(obj.success_rate_ejection, 'stdev_factor', 'number', 'success_rate_ejection'); + validatePercentage(obj.success_rate_ejection, 'enforcement_percentage', 'success_rate_ejection'); + validateFieldType(obj.success_rate_ejection, 'minimum_hosts', 'number', 'success_rate_ejection'); + validateFieldType(obj.success_rate_ejection, 'request_volume', 'number', 'success_rate_ejection'); + } + if ('failure_percentage_ejection' in obj && + obj.failure_percentage_ejection !== undefined) { + if (typeof obj.failure_percentage_ejection !== 'object') { + throw new Error('outlier detection config failure_percentage_ejection must be an object'); + } + validatePercentage(obj.failure_percentage_ejection, 'threshold', 'failure_percentage_ejection'); + validatePercentage(obj.failure_percentage_ejection, 'enforcement_percentage', 'failure_percentage_ejection'); + validateFieldType(obj.failure_percentage_ejection, 'minimum_hosts', 'number', 'failure_percentage_ejection'); + validateFieldType(obj.failure_percentage_ejection, 'request_volume', 'number', 'failure_percentage_ejection'); + } + if (!('child_policy' in obj) || !Array.isArray(obj.child_policy)) { + throw new Error('outlier detection config child_policy must be an array'); + } + const childPolicy = (0, load_balancer_1.selectLbConfigFromList)(obj.child_policy); + if (!childPolicy) { + throw new Error('outlier detection config child_policy: no valid recognized policy found'); + } + return new 
OutlierDetectionLoadBalancingConfig(obj.interval ? (0, duration_1.durationToMs)(obj.interval) : null, obj.base_ejection_time ? (0, duration_1.durationToMs)(obj.base_ejection_time) : null, obj.max_ejection_time ? (0, duration_1.durationToMs)(obj.max_ejection_time) : null, (_a = obj.max_ejection_percent) !== null && _a !== void 0 ? _a : null, obj.success_rate_ejection, obj.failure_percentage_ejection, childPolicy); + } +} +exports.OutlierDetectionLoadBalancingConfig = OutlierDetectionLoadBalancingConfig; +class OutlierDetectionSubchannelWrapper extends subchannel_interface_1.BaseSubchannelWrapper { + constructor(childSubchannel, mapEntry) { + super(childSubchannel); + this.mapEntry = mapEntry; + this.refCount = 0; + } + ref() { + this.child.ref(); + this.refCount += 1; + } + unref() { + this.child.unref(); + this.refCount -= 1; + if (this.refCount <= 0) { + if (this.mapEntry) { + const index = this.mapEntry.subchannelWrappers.indexOf(this); + if (index >= 0) { + this.mapEntry.subchannelWrappers.splice(index, 1); + } + } + } + } + eject() { + this.setHealthy(false); + } + uneject() { + this.setHealthy(true); + } + getMapEntry() { + return this.mapEntry; + } + getWrappedSubchannel() { + return this.child; + } +} +function createEmptyBucket() { + return { + success: 0, + failure: 0, + }; +} +class CallCounter { + constructor() { + this.activeBucket = createEmptyBucket(); + this.inactiveBucket = createEmptyBucket(); + } + addSuccess() { + this.activeBucket.success += 1; + } + addFailure() { + this.activeBucket.failure += 1; + } + switchBuckets() { + this.inactiveBucket = this.activeBucket; + this.activeBucket = createEmptyBucket(); + } + getLastSuccesses() { + return this.inactiveBucket.success; + } + getLastFailures() { + return this.inactiveBucket.failure; + } +} +class OutlierDetectionPicker { + constructor(wrappedPicker, countCalls) { + this.wrappedPicker = wrappedPicker; + this.countCalls = countCalls; + } + pick(pickArgs) { + const wrappedPick = this.wrappedPicker.pick(pickArgs); + if (wrappedPick.pickResultType === picker_1.PickResultType.COMPLETE) { + const subchannelWrapper = wrappedPick.subchannel; + const mapEntry = subchannelWrapper.getMapEntry(); + if (mapEntry) { + let onCallEnded = wrappedPick.onCallEnded; + if (this.countCalls) { + onCallEnded = statusCode => { + var _a; + if (statusCode === constants_1.Status.OK) { + mapEntry.counter.addSuccess(); + } + else { + mapEntry.counter.addFailure(); + } + (_a = wrappedPick.onCallEnded) === null || _a === void 0 ? 
void 0 : _a.call(wrappedPick, statusCode); + }; + } + return Object.assign(Object.assign({}, wrappedPick), { subchannel: subchannelWrapper.getWrappedSubchannel(), onCallEnded: onCallEnded }); + } + else { + return Object.assign(Object.assign({}, wrappedPick), { subchannel: subchannelWrapper.getWrappedSubchannel() }); + } + } + else { + return wrappedPick; + } + } +} +class OutlierDetectionLoadBalancer { + constructor(channelControlHelper, credentials, options) { + this.entryMap = new subchannel_address_1.EndpointMap(); + this.latestConfig = null; + this.timerStartTime = null; + this.childBalancer = new load_balancer_child_handler_1.ChildLoadBalancerHandler((0, experimental_1.createChildChannelControlHelper)(channelControlHelper, { + createSubchannel: (subchannelAddress, subchannelArgs, credentialsOverride) => { + const originalSubchannel = channelControlHelper.createSubchannel(subchannelAddress, subchannelArgs, credentialsOverride); + const mapEntry = this.entryMap.getForSubchannelAddress(subchannelAddress); + const subchannelWrapper = new OutlierDetectionSubchannelWrapper(originalSubchannel, mapEntry); + if ((mapEntry === null || mapEntry === void 0 ? void 0 : mapEntry.currentEjectionTimestamp) !== null) { + // If the address is ejected, propagate that to the new subchannel wrapper + subchannelWrapper.eject(); + } + mapEntry === null || mapEntry === void 0 ? void 0 : mapEntry.subchannelWrappers.push(subchannelWrapper); + return subchannelWrapper; + }, + updateState: (connectivityState, picker) => { + if (connectivityState === connectivity_state_1.ConnectivityState.READY) { + channelControlHelper.updateState(connectivityState, new OutlierDetectionPicker(picker, this.isCountingEnabled())); + } + else { + channelControlHelper.updateState(connectivityState, picker); + } + }, + }), credentials, options); + this.ejectionTimer = setInterval(() => { }, 0); + clearInterval(this.ejectionTimer); + } + isCountingEnabled() { + return (this.latestConfig !== null && + (this.latestConfig.getSuccessRateEjectionConfig() !== null || + this.latestConfig.getFailurePercentageEjectionConfig() !== null)); + } + getCurrentEjectionPercent() { + let ejectionCount = 0; + for (const mapEntry of this.entryMap.values()) { + if (mapEntry.currentEjectionTimestamp !== null) { + ejectionCount += 1; + } + } + return (ejectionCount * 100) / this.entryMap.size; + } + runSuccessRateCheck(ejectionTimestamp) { + if (!this.latestConfig) { + return; + } + const successRateConfig = this.latestConfig.getSuccessRateEjectionConfig(); + if (!successRateConfig) { + return; + } + trace('Running success rate check'); + // Step 1 + const targetRequestVolume = successRateConfig.request_volume; + let addresesWithTargetVolume = 0; + const successRates = []; + for (const [endpoint, mapEntry] of this.entryMap.entries()) { + const successes = mapEntry.counter.getLastSuccesses(); + const failures = mapEntry.counter.getLastFailures(); + trace('Stats for ' + + (0, subchannel_address_1.endpointToString)(endpoint) + + ': successes=' + + successes + + ' failures=' + + failures + + ' targetRequestVolume=' + + targetRequestVolume); + if (successes + failures >= targetRequestVolume) { + addresesWithTargetVolume += 1; + successRates.push(successes / (successes + failures)); + } + } + trace('Found ' + + addresesWithTargetVolume + + ' success rate candidates; currentEjectionPercent=' + + this.getCurrentEjectionPercent() + + ' successRates=[' + + successRates + + ']'); + if (addresesWithTargetVolume < successRateConfig.minimum_hosts) { + return; + } + // 
Step 2 + const successRateMean = successRates.reduce((a, b) => a + b) / successRates.length; + let successRateDeviationSum = 0; + for (const rate of successRates) { + const deviation = rate - successRateMean; + successRateDeviationSum += deviation * deviation; + } + const successRateVariance = successRateDeviationSum / successRates.length; + const successRateStdev = Math.sqrt(successRateVariance); + const ejectionThreshold = successRateMean - + successRateStdev * (successRateConfig.stdev_factor / 1000); + trace('stdev=' + successRateStdev + ' ejectionThreshold=' + ejectionThreshold); + // Step 3 + for (const [address, mapEntry] of this.entryMap.entries()) { + // Step 3.i + if (this.getCurrentEjectionPercent() >= + this.latestConfig.getMaxEjectionPercent()) { + break; + } + // Step 3.ii + const successes = mapEntry.counter.getLastSuccesses(); + const failures = mapEntry.counter.getLastFailures(); + if (successes + failures < targetRequestVolume) { + continue; + } + // Step 3.iii + const successRate = successes / (successes + failures); + trace('Checking candidate ' + address + ' successRate=' + successRate); + if (successRate < ejectionThreshold) { + const randomNumber = Math.random() * 100; + trace('Candidate ' + + address + + ' randomNumber=' + + randomNumber + + ' enforcement_percentage=' + + successRateConfig.enforcement_percentage); + if (randomNumber < successRateConfig.enforcement_percentage) { + trace('Ejecting candidate ' + address); + this.eject(mapEntry, ejectionTimestamp); + } + } + } + } + runFailurePercentageCheck(ejectionTimestamp) { + if (!this.latestConfig) { + return; + } + const failurePercentageConfig = this.latestConfig.getFailurePercentageEjectionConfig(); + if (!failurePercentageConfig) { + return; + } + trace('Running failure percentage check. 
threshold=' + + failurePercentageConfig.threshold + + ' request volume threshold=' + + failurePercentageConfig.request_volume); + // Step 1 + let addressesWithTargetVolume = 0; + for (const mapEntry of this.entryMap.values()) { + const successes = mapEntry.counter.getLastSuccesses(); + const failures = mapEntry.counter.getLastFailures(); + if (successes + failures >= failurePercentageConfig.request_volume) { + addressesWithTargetVolume += 1; + } + } + if (addressesWithTargetVolume < failurePercentageConfig.minimum_hosts) { + return; + } + // Step 2 + for (const [address, mapEntry] of this.entryMap.entries()) { + // Step 2.i + if (this.getCurrentEjectionPercent() >= + this.latestConfig.getMaxEjectionPercent()) { + break; + } + // Step 2.ii + const successes = mapEntry.counter.getLastSuccesses(); + const failures = mapEntry.counter.getLastFailures(); + trace('Candidate successes=' + successes + ' failures=' + failures); + if (successes + failures < failurePercentageConfig.request_volume) { + continue; + } + // Step 2.iii + const failurePercentage = (failures * 100) / (failures + successes); + if (failurePercentage > failurePercentageConfig.threshold) { + const randomNumber = Math.random() * 100; + trace('Candidate ' + + address + + ' randomNumber=' + + randomNumber + + ' enforcement_percentage=' + + failurePercentageConfig.enforcement_percentage); + if (randomNumber < failurePercentageConfig.enforcement_percentage) { + trace('Ejecting candidate ' + address); + this.eject(mapEntry, ejectionTimestamp); + } + } + } + } + eject(mapEntry, ejectionTimestamp) { + mapEntry.currentEjectionTimestamp = new Date(); + mapEntry.ejectionTimeMultiplier += 1; + for (const subchannelWrapper of mapEntry.subchannelWrappers) { + subchannelWrapper.eject(); + } + } + uneject(mapEntry) { + mapEntry.currentEjectionTimestamp = null; + for (const subchannelWrapper of mapEntry.subchannelWrappers) { + subchannelWrapper.uneject(); + } + } + switchAllBuckets() { + for (const mapEntry of this.entryMap.values()) { + mapEntry.counter.switchBuckets(); + } + } + startTimer(delayMs) { + var _a, _b; + this.ejectionTimer = setTimeout(() => this.runChecks(), delayMs); + (_b = (_a = this.ejectionTimer).unref) === null || _b === void 0 ? 
void 0 : _b.call(_a); + } + runChecks() { + const ejectionTimestamp = new Date(); + trace('Ejection timer running'); + this.switchAllBuckets(); + if (!this.latestConfig) { + return; + } + this.timerStartTime = ejectionTimestamp; + this.startTimer(this.latestConfig.getIntervalMs()); + this.runSuccessRateCheck(ejectionTimestamp); + this.runFailurePercentageCheck(ejectionTimestamp); + for (const [address, mapEntry] of this.entryMap.entries()) { + if (mapEntry.currentEjectionTimestamp === null) { + if (mapEntry.ejectionTimeMultiplier > 0) { + mapEntry.ejectionTimeMultiplier -= 1; + } + } + else { + const baseEjectionTimeMs = this.latestConfig.getBaseEjectionTimeMs(); + const maxEjectionTimeMs = this.latestConfig.getMaxEjectionTimeMs(); + const returnTime = new Date(mapEntry.currentEjectionTimestamp.getTime()); + returnTime.setMilliseconds(returnTime.getMilliseconds() + + Math.min(baseEjectionTimeMs * mapEntry.ejectionTimeMultiplier, Math.max(baseEjectionTimeMs, maxEjectionTimeMs))); + if (returnTime < new Date()) { + trace('Unejecting ' + address); + this.uneject(mapEntry); + } + } + } + } + updateAddressList(endpointList, lbConfig, attributes) { + if (!(lbConfig instanceof OutlierDetectionLoadBalancingConfig)) { + return; + } + for (const endpoint of endpointList) { + if (!this.entryMap.has(endpoint)) { + trace('Adding map entry for ' + (0, subchannel_address_1.endpointToString)(endpoint)); + this.entryMap.set(endpoint, { + counter: new CallCounter(), + currentEjectionTimestamp: null, + ejectionTimeMultiplier: 0, + subchannelWrappers: [], + }); + } + } + this.entryMap.deleteMissing(endpointList); + const childPolicy = lbConfig.getChildPolicy(); + this.childBalancer.updateAddressList(endpointList, childPolicy, attributes); + if (lbConfig.getSuccessRateEjectionConfig() || + lbConfig.getFailurePercentageEjectionConfig()) { + if (this.timerStartTime) { + trace('Previous timer existed. Replacing timer'); + clearTimeout(this.ejectionTimer); + const remainingDelay = lbConfig.getIntervalMs() - + (new Date().getTime() - this.timerStartTime.getTime()); + this.startTimer(remainingDelay); + } + else { + trace('Starting new timer'); + this.timerStartTime = new Date(); + this.startTimer(lbConfig.getIntervalMs()); + this.switchAllBuckets(); + } + } + else { + trace('Counting disabled. Cancelling timer.'); + this.timerStartTime = null; + clearTimeout(this.ejectionTimer); + for (const mapEntry of this.entryMap.values()) { + this.uneject(mapEntry); + mapEntry.ejectionTimeMultiplier = 0; + } + } + this.latestConfig = lbConfig; + } + exitIdle() { + this.childBalancer.exitIdle(); + } + resetBackoff() { + this.childBalancer.resetBackoff(); + } + destroy() { + clearTimeout(this.ejectionTimer); + this.childBalancer.destroy(); + } + getTypeName() { + return TYPE_NAME; + } +} +exports.OutlierDetectionLoadBalancer = OutlierDetectionLoadBalancer; +function setup() { + if (OUTLIER_DETECTION_ENABLED) { + (0, experimental_1.registerLoadBalancerType)(TYPE_NAME, OutlierDetectionLoadBalancer, OutlierDetectionLoadBalancingConfig); + } +} +exports.setup = setup; +//# sourceMappingURL=load-balancer-outlier-detection.js.map + +/***/ }), + +/***/ 38977: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setup = exports.LeafLoadBalancer = exports.PickFirstLoadBalancer = exports.shuffled = exports.PickFirstLoadBalancingConfig = void 0; +const load_balancer_1 = __nccwpck_require__(52680); +const connectivity_state_1 = __nccwpck_require__(80878); +const picker_1 = __nccwpck_require__(81611); +const subchannel_address_1 = __nccwpck_require__(78021); +const logging = __nccwpck_require__(35993); +const constants_1 = __nccwpck_require__(90634); +const subchannel_address_2 = __nccwpck_require__(78021); +const net_1 = __nccwpck_require__(41808); +const TRACER_NAME = 'pick_first'; +function trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, text); +} +const TYPE_NAME = 'pick_first'; +/** + * Delay after starting a connection on a subchannel before starting a + * connection on the next subchannel in the list, for Happy Eyeballs algorithm. + */ +const CONNECTION_DELAY_INTERVAL_MS = 250; +class PickFirstLoadBalancingConfig { + constructor(shuffleAddressList) { + this.shuffleAddressList = shuffleAddressList; + } + getLoadBalancerName() { + return TYPE_NAME; + } + toJsonObject() { + return { + [TYPE_NAME]: { + shuffleAddressList: this.shuffleAddressList, + }, + }; + } + getShuffleAddressList() { + return this.shuffleAddressList; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + static createFromJson(obj) { + if ('shuffleAddressList' in obj && + !(typeof obj.shuffleAddressList === 'boolean')) { + throw new Error('pick_first config field shuffleAddressList must be a boolean if provided'); + } + return new PickFirstLoadBalancingConfig(obj.shuffleAddressList === true); + } +} +exports.PickFirstLoadBalancingConfig = PickFirstLoadBalancingConfig; +/** + * Picker for a `PickFirstLoadBalancer` in the READY state. Always returns the + * picked subchannel. 
+ */ +class PickFirstPicker { + constructor(subchannel) { + this.subchannel = subchannel; + } + pick(pickArgs) { + return { + pickResultType: picker_1.PickResultType.COMPLETE, + subchannel: this.subchannel, + status: null, + onCallStarted: null, + onCallEnded: null, + }; + } +} +/** + * Return a new array with the elements of the input array in a random order + * @param list The input array + * @returns A shuffled array of the elements of list + */ +function shuffled(list) { + const result = list.slice(); + for (let i = result.length - 1; i > 1; i--) { + const j = Math.floor(Math.random() * (i + 1)); + const temp = result[i]; + result[i] = result[j]; + result[j] = temp; + } + return result; +} +exports.shuffled = shuffled; +/** + * Interleave addresses in addressList by family in accordance with RFC-8304 section 4 + * @param addressList + * @returns + */ +function interleaveAddressFamilies(addressList) { + const result = []; + const ipv6Addresses = []; + const ipv4Addresses = []; + const ipv6First = (0, subchannel_address_2.isTcpSubchannelAddress)(addressList[0]) && (0, net_1.isIPv6)(addressList[0].host); + for (const address of addressList) { + if ((0, subchannel_address_2.isTcpSubchannelAddress)(address) && (0, net_1.isIPv6)(address.host)) { + ipv6Addresses.push(address); + } + else { + ipv4Addresses.push(address); + } + } + const firstList = ipv6First ? ipv6Addresses : ipv4Addresses; + const secondList = ipv6First ? ipv4Addresses : ipv6Addresses; + for (let i = 0; i < Math.max(firstList.length, secondList.length); i++) { + if (i < firstList.length) { + result.push(firstList[i]); + } + if (i < secondList.length) { + result.push(secondList[i]); + } + } + return result; +} +const REPORT_HEALTH_STATUS_OPTION_NAME = 'grpc-node.internal.pick-first.report_health_status'; +class PickFirstLoadBalancer { + /** + * Load balancer that attempts to connect to each backend in the address list + * in order, and picks the first one that connects, using it for every + * request. + * @param channelControlHelper `ChannelControlHelper` instance provided by + * this load balancer's owner. + */ + constructor(channelControlHelper, credentials, options) { + this.channelControlHelper = channelControlHelper; + /** + * The list of subchannels this load balancer is currently attempting to + * connect to. + */ + this.children = []; + /** + * The current connectivity state of the load balancer. + */ + this.currentState = connectivity_state_1.ConnectivityState.IDLE; + /** + * The index within the `subchannels` array of the subchannel with the most + * recently started connection attempt. + */ + this.currentSubchannelIndex = 0; + /** + * The currently picked subchannel used for making calls. Populated if + * and only if the load balancer's current state is READY. In that case, + * the subchannel's current state is also READY. + */ + this.currentPick = null; + /** + * Listener callback attached to each subchannel in the `subchannels` list + * while establishing a connection. + */ + this.subchannelStateListener = (subchannel, previousState, newState, keepaliveTime, errorMessage) => { + this.onSubchannelStateUpdate(subchannel, previousState, newState, errorMessage); + }; + this.pickedSubchannelHealthListener = () => this.calculateAndReportNewState(); + /** + * The LB policy enters sticky TRANSIENT_FAILURE mode when all + * subchannels have failed to connect at least once, and it stays in that + * mode until a connection attempt is successful. 
While in sticky TF mode, + * the LB policy continuously attempts to connect to all of its subchannels. + */ + this.stickyTransientFailureMode = false; + /** + * The most recent error reported by any subchannel as it transitioned to + * TRANSIENT_FAILURE. + */ + this.lastError = null; + this.latestAddressList = null; + this.connectionDelayTimeout = setTimeout(() => { }, 0); + clearTimeout(this.connectionDelayTimeout); + this.reportHealthStatus = options[REPORT_HEALTH_STATUS_OPTION_NAME]; + } + allChildrenHaveReportedTF() { + return this.children.every(child => child.hasReportedTransientFailure); + } + resetChildrenReportedTF() { + this.children.every(child => child.hasReportedTransientFailure = false); + } + calculateAndReportNewState() { + if (this.currentPick) { + if (this.reportHealthStatus && !this.currentPick.isHealthy()) { + this.updateState(connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE, new picker_1.UnavailablePicker({ + details: `Picked subchannel ${this.currentPick.getAddress()} is unhealthy`, + })); + } + else { + this.updateState(connectivity_state_1.ConnectivityState.READY, new PickFirstPicker(this.currentPick)); + } + } + else if (this.children.length === 0) { + this.updateState(connectivity_state_1.ConnectivityState.IDLE, new picker_1.QueuePicker(this)); + } + else { + if (this.stickyTransientFailureMode) { + this.updateState(connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE, new picker_1.UnavailablePicker({ + details: `No connection established. Last error: ${this.lastError}`, + })); + } + else { + this.updateState(connectivity_state_1.ConnectivityState.CONNECTING, new picker_1.QueuePicker(this)); + } + } + } + requestReresolution() { + this.channelControlHelper.requestReresolution(); + } + maybeEnterStickyTransientFailureMode() { + if (!this.allChildrenHaveReportedTF()) { + return; + } + this.requestReresolution(); + this.resetChildrenReportedTF(); + if (this.stickyTransientFailureMode) { + this.calculateAndReportNewState(); + return; + } + this.stickyTransientFailureMode = true; + for (const { subchannel } of this.children) { + subchannel.startConnecting(); + } + this.calculateAndReportNewState(); + } + removeCurrentPick() { + if (this.currentPick !== null) { + this.currentPick.removeConnectivityStateListener(this.subchannelStateListener); + this.channelControlHelper.removeChannelzChild(this.currentPick.getChannelzRef()); + this.currentPick.removeHealthStateWatcher(this.pickedSubchannelHealthListener); + // Unref last, to avoid triggering listeners + this.currentPick.unref(); + this.currentPick = null; + } + } + onSubchannelStateUpdate(subchannel, previousState, newState, errorMessage) { + var _a; + if ((_a = this.currentPick) === null || _a === void 0 ? 
void 0 : _a.realSubchannelEquals(subchannel)) { + if (newState !== connectivity_state_1.ConnectivityState.READY) { + this.removeCurrentPick(); + this.calculateAndReportNewState(); + } + return; + } + for (const [index, child] of this.children.entries()) { + if (subchannel.realSubchannelEquals(child.subchannel)) { + if (newState === connectivity_state_1.ConnectivityState.READY) { + this.pickSubchannel(child.subchannel); + } + if (newState === connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE) { + child.hasReportedTransientFailure = true; + if (errorMessage) { + this.lastError = errorMessage; + } + this.maybeEnterStickyTransientFailureMode(); + if (index === this.currentSubchannelIndex) { + this.startNextSubchannelConnecting(index + 1); + } + } + child.subchannel.startConnecting(); + return; + } + } + } + startNextSubchannelConnecting(startIndex) { + clearTimeout(this.connectionDelayTimeout); + for (const [index, child] of this.children.entries()) { + if (index >= startIndex) { + const subchannelState = child.subchannel.getConnectivityState(); + if (subchannelState === connectivity_state_1.ConnectivityState.IDLE || + subchannelState === connectivity_state_1.ConnectivityState.CONNECTING) { + this.startConnecting(index); + return; + } + } + } + this.maybeEnterStickyTransientFailureMode(); + } + /** + * Have a single subchannel in the `subchannels` list start connecting. + * @param subchannelIndex The index into the `subchannels` list. + */ + startConnecting(subchannelIndex) { + var _a, _b; + clearTimeout(this.connectionDelayTimeout); + this.currentSubchannelIndex = subchannelIndex; + if (this.children[subchannelIndex].subchannel.getConnectivityState() === + connectivity_state_1.ConnectivityState.IDLE) { + trace('Start connecting to subchannel with address ' + + this.children[subchannelIndex].subchannel.getAddress()); + process.nextTick(() => { + var _a; + (_a = this.children[subchannelIndex]) === null || _a === void 0 ? void 0 : _a.subchannel.startConnecting(); + }); + } + this.connectionDelayTimeout = setTimeout(() => { + this.startNextSubchannelConnecting(subchannelIndex + 1); + }, CONNECTION_DELAY_INTERVAL_MS); + (_b = (_a = this.connectionDelayTimeout).unref) === null || _b === void 0 ? void 0 : _b.call(_a); + } + /** + * Declare that the specified subchannel should be used to make requests. + * This functions the same independent of whether subchannel is a member of + * this.children and whether it is equal to this.currentPick. + * Prerequisite: subchannel.getConnectivityState() === READY. + * @param subchannel + */ + pickSubchannel(subchannel) { + trace('Pick subchannel with address ' + subchannel.getAddress()); + this.stickyTransientFailureMode = false; + /* Ref before removeCurrentPick and resetSubchannelList to avoid the + * refcount dropping to 0 during this process. 
*/ + subchannel.ref(); + this.channelControlHelper.addChannelzChild(subchannel.getChannelzRef()); + this.removeCurrentPick(); + this.resetSubchannelList(); + subchannel.addConnectivityStateListener(this.subchannelStateListener); + subchannel.addHealthStateWatcher(this.pickedSubchannelHealthListener); + this.currentPick = subchannel; + clearTimeout(this.connectionDelayTimeout); + this.calculateAndReportNewState(); + } + updateState(newState, picker) { + trace(connectivity_state_1.ConnectivityState[this.currentState] + + ' -> ' + + connectivity_state_1.ConnectivityState[newState]); + this.currentState = newState; + this.channelControlHelper.updateState(newState, picker); + } + resetSubchannelList() { + for (const child of this.children) { + /* Always remoev the connectivity state listener. If the subchannel is + getting picked, it will be re-added then. */ + child.subchannel.removeConnectivityStateListener(this.subchannelStateListener); + /* Refs are counted independently for the children list and the + * currentPick, so we call unref whether or not the child is the + * currentPick. Channelz child references are also refcounted, so + * removeChannelzChild can be handled the same way. */ + child.subchannel.unref(); + this.channelControlHelper.removeChannelzChild(child.subchannel.getChannelzRef()); + } + this.currentSubchannelIndex = 0; + this.children = []; + } + connectToAddressList(addressList) { + trace('connectToAddressList([' + addressList.map(address => (0, subchannel_address_1.subchannelAddressToString)(address)) + '])'); + const newChildrenList = addressList.map(address => ({ + subchannel: this.channelControlHelper.createSubchannel(address, {}, null), + hasReportedTransientFailure: false, + })); + for (const { subchannel } of newChildrenList) { + if (subchannel.getConnectivityState() === connectivity_state_1.ConnectivityState.READY) { + this.pickSubchannel(subchannel); + return; + } + } + /* Ref each subchannel before resetting the list, to ensure that + * subchannels shared between the list don't drop to 0 refs during the + * transition. */ + for (const { subchannel } of newChildrenList) { + subchannel.ref(); + this.channelControlHelper.addChannelzChild(subchannel.getChannelzRef()); + } + this.resetSubchannelList(); + this.children = newChildrenList; + for (const { subchannel } of this.children) { + subchannel.addConnectivityStateListener(this.subchannelStateListener); + } + for (const child of this.children) { + if (child.subchannel.getConnectivityState() === + connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE) { + child.hasReportedTransientFailure = true; + } + } + this.startNextSubchannelConnecting(0); + this.calculateAndReportNewState(); + } + updateAddressList(endpointList, lbConfig) { + if (!(lbConfig instanceof PickFirstLoadBalancingConfig)) { + return; + } + /* Previously, an update would be discarded if it was identical to the + * previous update, to minimize churn. Now the DNS resolver is + * rate-limited, so that is less of a concern. 
*/ + if (lbConfig.getShuffleAddressList()) { + endpointList = shuffled(endpointList); + } + const rawAddressList = [].concat(...endpointList.map(endpoint => endpoint.addresses)); + trace('updateAddressList([' + rawAddressList.map(address => (0, subchannel_address_1.subchannelAddressToString)(address)) + '])'); + if (rawAddressList.length === 0) { + throw new Error('No addresses in endpoint list passed to pick_first'); + } + const addressList = interleaveAddressFamilies(rawAddressList); + this.latestAddressList = addressList; + this.connectToAddressList(addressList); + } + exitIdle() { + if (this.currentState === connectivity_state_1.ConnectivityState.IDLE && + this.latestAddressList) { + this.connectToAddressList(this.latestAddressList); + } + } + resetBackoff() { + /* The pick first load balancer does not have a connection backoff, so this + * does nothing */ + } + destroy() { + this.resetSubchannelList(); + this.removeCurrentPick(); + } + getTypeName() { + return TYPE_NAME; + } +} +exports.PickFirstLoadBalancer = PickFirstLoadBalancer; +const LEAF_CONFIG = new PickFirstLoadBalancingConfig(false); +/** + * This class handles the leaf load balancing operations for a single endpoint. + * It is a thin wrapper around a PickFirstLoadBalancer with a different API + * that more closely reflects how it will be used as a leaf balancer. + */ +class LeafLoadBalancer { + constructor(endpoint, channelControlHelper, credentials, options) { + this.endpoint = endpoint; + this.latestState = connectivity_state_1.ConnectivityState.IDLE; + const childChannelControlHelper = (0, load_balancer_1.createChildChannelControlHelper)(channelControlHelper, { + updateState: (connectivityState, picker) => { + this.latestState = connectivityState; + this.latestPicker = picker; + channelControlHelper.updateState(connectivityState, picker); + }, + }); + this.pickFirstBalancer = new PickFirstLoadBalancer(childChannelControlHelper, credentials, Object.assign(Object.assign({}, options), { [REPORT_HEALTH_STATUS_OPTION_NAME]: true })); + this.latestPicker = new picker_1.QueuePicker(this.pickFirstBalancer); + } + startConnecting() { + this.pickFirstBalancer.updateAddressList([this.endpoint], LEAF_CONFIG); + } + /** + * Update the endpoint associated with this LeafLoadBalancer to a new + * endpoint. Does not trigger connection establishment if a connection + * attempt is not already in progress. + * @param newEndpoint + */ + updateEndpoint(newEndpoint) { + this.endpoint = newEndpoint; + if (this.latestState !== connectivity_state_1.ConnectivityState.IDLE) { + this.startConnecting(); + } + } + getConnectivityState() { + return this.latestState; + } + getPicker() { + return this.latestPicker; + } + getEndpoint() { + return this.endpoint; + } + exitIdle() { + this.pickFirstBalancer.exitIdle(); + } + destroy() { + this.pickFirstBalancer.destroy(); + } +} +exports.LeafLoadBalancer = LeafLoadBalancer; +function setup() { + (0, load_balancer_1.registerLoadBalancerType)(TYPE_NAME, PickFirstLoadBalancer, PickFirstLoadBalancingConfig); + (0, load_balancer_1.registerDefaultLoadBalancerType)(TYPE_NAME); +} +exports.setup = setup; +//# sourceMappingURL=load-balancer-pick-first.js.map + +/***/ }), + +/***/ 92787: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setup = exports.RoundRobinLoadBalancer = void 0; +const load_balancer_1 = __nccwpck_require__(52680); +const connectivity_state_1 = __nccwpck_require__(80878); +const picker_1 = __nccwpck_require__(81611); +const logging = __nccwpck_require__(35993); +const constants_1 = __nccwpck_require__(90634); +const subchannel_address_1 = __nccwpck_require__(78021); +const load_balancer_pick_first_1 = __nccwpck_require__(38977); +const TRACER_NAME = 'round_robin'; +function trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, text); +} +const TYPE_NAME = 'round_robin'; +class RoundRobinLoadBalancingConfig { + getLoadBalancerName() { + return TYPE_NAME; + } + constructor() { } + toJsonObject() { + return { + [TYPE_NAME]: {}, + }; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + static createFromJson(obj) { + return new RoundRobinLoadBalancingConfig(); + } +} +class RoundRobinPicker { + constructor(children, nextIndex = 0) { + this.children = children; + this.nextIndex = nextIndex; + } + pick(pickArgs) { + const childPicker = this.children[this.nextIndex].picker; + this.nextIndex = (this.nextIndex + 1) % this.children.length; + return childPicker.pick(pickArgs); + } + /** + * Check what the next subchannel returned would be. Used by the load + * balancer implementation to preserve this part of the picker state if + * possible when a subchannel connects or disconnects. + */ + peekNextEndpoint() { + return this.children[this.nextIndex].endpoint; + } +} +class RoundRobinLoadBalancer { + constructor(channelControlHelper, credentials, options) { + this.channelControlHelper = channelControlHelper; + this.credentials = credentials; + this.options = options; + this.children = []; + this.currentState = connectivity_state_1.ConnectivityState.IDLE; + this.currentReadyPicker = null; + this.updatesPaused = false; + this.lastError = null; + this.childChannelControlHelper = (0, load_balancer_1.createChildChannelControlHelper)(channelControlHelper, { + updateState: (connectivityState, picker) => { + /* Ensure that name resolution is requested again after active + * connections are dropped. This is more aggressive than necessary to + * accomplish that, so we are counting on resolvers to have + * reasonable rate limits. 
*/ + if (this.currentState === connectivity_state_1.ConnectivityState.READY && connectivityState !== connectivity_state_1.ConnectivityState.READY) { + this.channelControlHelper.requestReresolution(); + } + this.calculateAndUpdateState(); + }, + }); + } + countChildrenWithState(state) { + return this.children.filter(child => child.getConnectivityState() === state) + .length; + } + calculateAndUpdateState() { + if (this.updatesPaused) { + return; + } + if (this.countChildrenWithState(connectivity_state_1.ConnectivityState.READY) > 0) { + const readyChildren = this.children.filter(child => child.getConnectivityState() === connectivity_state_1.ConnectivityState.READY); + let index = 0; + if (this.currentReadyPicker !== null) { + const nextPickedEndpoint = this.currentReadyPicker.peekNextEndpoint(); + index = readyChildren.findIndex(child => (0, subchannel_address_1.endpointEqual)(child.getEndpoint(), nextPickedEndpoint)); + if (index < 0) { + index = 0; + } + } + this.updateState(connectivity_state_1.ConnectivityState.READY, new RoundRobinPicker(readyChildren.map(child => ({ + endpoint: child.getEndpoint(), + picker: child.getPicker(), + })), index)); + } + else if (this.countChildrenWithState(connectivity_state_1.ConnectivityState.CONNECTING) > 0) { + this.updateState(connectivity_state_1.ConnectivityState.CONNECTING, new picker_1.QueuePicker(this)); + } + else if (this.countChildrenWithState(connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE) > 0) { + this.updateState(connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE, new picker_1.UnavailablePicker({ + details: `No connection established. Last error: ${this.lastError}`, + })); + } + else { + this.updateState(connectivity_state_1.ConnectivityState.IDLE, new picker_1.QueuePicker(this)); + } + /* round_robin should keep all children connected, this is how we do that. + * We can't do this more efficiently in the individual child's updateState + * callback because that doesn't have a reference to which child the state + * change is associated with. */ + for (const child of this.children) { + if (child.getConnectivityState() === connectivity_state_1.ConnectivityState.IDLE) { + child.exitIdle(); + } + } + } + updateState(newState, picker) { + trace(connectivity_state_1.ConnectivityState[this.currentState] + + ' -> ' + + connectivity_state_1.ConnectivityState[newState]); + if (newState === connectivity_state_1.ConnectivityState.READY) { + this.currentReadyPicker = picker; + } + else { + this.currentReadyPicker = null; + } + this.currentState = newState; + this.channelControlHelper.updateState(newState, picker); + } + resetSubchannelList() { + for (const child of this.children) { + child.destroy(); + } + } + updateAddressList(endpointList, lbConfig) { + this.resetSubchannelList(); + trace('Connect to endpoint list ' + endpointList.map(subchannel_address_1.endpointToString)); + this.updatesPaused = true; + this.children = endpointList.map(endpoint => new load_balancer_pick_first_1.LeafLoadBalancer(endpoint, this.childChannelControlHelper, this.credentials, this.options)); + for (const child of this.children) { + child.startConnecting(); + } + this.updatesPaused = false; + this.calculateAndUpdateState(); + } + exitIdle() { + /* The round_robin LB policy is only in the IDLE state if it has no + * addresses to try to connect to and it has no picked subchannel. + * In that case, there is no meaningful action that can be taken here. 
*/ + } + resetBackoff() { + // This LB policy has no backoff to reset + } + destroy() { + this.resetSubchannelList(); + } + getTypeName() { + return TYPE_NAME; + } +} +exports.RoundRobinLoadBalancer = RoundRobinLoadBalancer; +function setup() { + (0, load_balancer_1.registerLoadBalancerType)(TYPE_NAME, RoundRobinLoadBalancer, RoundRobinLoadBalancingConfig); +} +exports.setup = setup; +//# sourceMappingURL=load-balancer-round-robin.js.map + +/***/ }), + +/***/ 52680: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.selectLbConfigFromList = exports.getDefaultConfig = exports.parseLoadBalancingConfig = exports.isLoadBalancerNameRegistered = exports.createLoadBalancer = exports.registerDefaultLoadBalancerType = exports.registerLoadBalancerType = exports.createChildChannelControlHelper = void 0; +const logging_1 = __nccwpck_require__(35993); +const constants_1 = __nccwpck_require__(90634); +/** + * Create a child ChannelControlHelper that overrides some methods of the + * parent while letting others pass through to the parent unmodified. This + * allows other code to create these children without needing to know about + * all of the methods to be passed through. + * @param parent + * @param overrides + */ +function createChildChannelControlHelper(parent, overrides) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k; + return { + createSubchannel: (_b = (_a = overrides.createSubchannel) === null || _a === void 0 ? void 0 : _a.bind(overrides)) !== null && _b !== void 0 ? _b : parent.createSubchannel.bind(parent), + updateState: (_d = (_c = overrides.updateState) === null || _c === void 0 ? void 0 : _c.bind(overrides)) !== null && _d !== void 0 ? _d : parent.updateState.bind(parent), + requestReresolution: (_f = (_e = overrides.requestReresolution) === null || _e === void 0 ? void 0 : _e.bind(overrides)) !== null && _f !== void 0 ? _f : parent.requestReresolution.bind(parent), + addChannelzChild: (_h = (_g = overrides.addChannelzChild) === null || _g === void 0 ? void 0 : _g.bind(overrides)) !== null && _h !== void 0 ? _h : parent.addChannelzChild.bind(parent), + removeChannelzChild: (_k = (_j = overrides.removeChannelzChild) === null || _j === void 0 ? void 0 : _j.bind(overrides)) !== null && _k !== void 0 ? 
_k : parent.removeChannelzChild.bind(parent), + }; +} +exports.createChildChannelControlHelper = createChildChannelControlHelper; +const registeredLoadBalancerTypes = {}; +let defaultLoadBalancerType = null; +function registerLoadBalancerType(typeName, loadBalancerType, loadBalancingConfigType) { + registeredLoadBalancerTypes[typeName] = { + LoadBalancer: loadBalancerType, + LoadBalancingConfig: loadBalancingConfigType, + }; +} +exports.registerLoadBalancerType = registerLoadBalancerType; +function registerDefaultLoadBalancerType(typeName) { + defaultLoadBalancerType = typeName; +} +exports.registerDefaultLoadBalancerType = registerDefaultLoadBalancerType; +function createLoadBalancer(config, channelControlHelper, credentials, options) { + const typeName = config.getLoadBalancerName(); + if (typeName in registeredLoadBalancerTypes) { + return new registeredLoadBalancerTypes[typeName].LoadBalancer(channelControlHelper, credentials, options); + } + else { + return null; + } +} +exports.createLoadBalancer = createLoadBalancer; +function isLoadBalancerNameRegistered(typeName) { + return typeName in registeredLoadBalancerTypes; +} +exports.isLoadBalancerNameRegistered = isLoadBalancerNameRegistered; +function parseLoadBalancingConfig(rawConfig) { + const keys = Object.keys(rawConfig); + if (keys.length !== 1) { + throw new Error('Provided load balancing config has multiple conflicting entries'); + } + const typeName = keys[0]; + if (typeName in registeredLoadBalancerTypes) { + try { + return registeredLoadBalancerTypes[typeName].LoadBalancingConfig.createFromJson(rawConfig[typeName]); + } + catch (e) { + throw new Error(`${typeName}: ${e.message}`); + } + } + else { + throw new Error(`Unrecognized load balancing config name ${typeName}`); + } +} +exports.parseLoadBalancingConfig = parseLoadBalancingConfig; +function getDefaultConfig() { + if (!defaultLoadBalancerType) { + throw new Error('No default load balancer type registered'); + } + return new registeredLoadBalancerTypes[defaultLoadBalancerType].LoadBalancingConfig(); +} +exports.getDefaultConfig = getDefaultConfig; +function selectLbConfigFromList(configs, fallbackTodefault = false) { + for (const config of configs) { + try { + return parseLoadBalancingConfig(config); + } + catch (e) { + (0, logging_1.log)(constants_1.LogVerbosity.DEBUG, 'Config parsing failed with error', e.message); + continue; + } + } + if (fallbackTodefault) { + if (defaultLoadBalancerType) { + return new registeredLoadBalancerTypes[defaultLoadBalancerType].LoadBalancingConfig(); + } + else { + return null; + } + } + else { + return null; + } +} +exports.selectLbConfigFromList = selectLbConfigFromList; +//# sourceMappingURL=load-balancer.js.map + +/***/ }), + +/***/ 776: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LoadBalancingCall = void 0; +const connectivity_state_1 = __nccwpck_require__(80878); +const constants_1 = __nccwpck_require__(90634); +const deadline_1 = __nccwpck_require__(511); +const metadata_1 = __nccwpck_require__(83665); +const picker_1 = __nccwpck_require__(81611); +const uri_parser_1 = __nccwpck_require__(65974); +const logging = __nccwpck_require__(35993); +const control_plane_status_1 = __nccwpck_require__(39129); +const http2 = __nccwpck_require__(85158); +const TRACER_NAME = 'load_balancing_call'; +class LoadBalancingCall { + constructor(channel, callConfig, methodName, host, credentials, deadline, callNumber) { + var _a, _b; + this.channel = channel; + this.callConfig = callConfig; + this.methodName = methodName; + this.host = host; + this.credentials = credentials; + this.deadline = deadline; + this.callNumber = callNumber; + this.child = null; + this.readPending = false; + this.pendingMessage = null; + this.pendingHalfClose = false; + this.ended = false; + this.metadata = null; + this.listener = null; + this.onCallEnded = null; + this.childStartTime = null; + const splitPath = this.methodName.split('/'); + let serviceName = ''; + /* The standard path format is "/{serviceName}/{methodName}", so if we split + * by '/', the first item should be empty and the second should be the + * service name */ + if (splitPath.length >= 2) { + serviceName = splitPath[1]; + } + const hostname = (_b = (_a = (0, uri_parser_1.splitHostPort)(this.host)) === null || _a === void 0 ? void 0 : _a.host) !== null && _b !== void 0 ? _b : 'localhost'; + /* Currently, call credentials are only allowed on HTTPS connections, so we + * can assume that the scheme is "https" */ + this.serviceUrl = `https://${hostname}/${serviceName}`; + this.startTime = new Date(); + } + getDeadlineInfo() { + var _a, _b; + const deadlineInfo = []; + if (this.childStartTime) { + if (this.childStartTime > this.startTime) { + if ((_a = this.metadata) === null || _a === void 0 ? void 0 : _a.getOptions().waitForReady) { + deadlineInfo.push('wait_for_ready'); + } + deadlineInfo.push(`LB pick: ${(0, deadline_1.formatDateDifference)(this.startTime, this.childStartTime)}`); + } + deadlineInfo.push(...this.child.getDeadlineInfo()); + return deadlineInfo; + } + else { + if ((_b = this.metadata) === null || _b === void 0 ? void 0 : _b.getOptions().waitForReady) { + deadlineInfo.push('wait_for_ready'); + } + deadlineInfo.push('Waiting for LB pick'); + } + return deadlineInfo; + } + trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, '[' + this.callNumber + '] ' + text); + } + outputStatus(status, progress) { + var _a, _b; + if (!this.ended) { + this.ended = true; + this.trace('ended with status: code=' + + status.code + + ' details="' + + status.details + + '" start time=' + + this.startTime.toISOString()); + const finalStatus = Object.assign(Object.assign({}, status), { progress }); + (_a = this.listener) === null || _a === void 0 ? void 0 : _a.onReceiveStatus(finalStatus); + (_b = this.onCallEnded) === null || _b === void 0 ? void 0 : _b.call(this, finalStatus.code); + } + } + doPick() { + var _a, _b; + if (this.ended) { + return; + } + if (!this.metadata) { + throw new Error('doPick called before start'); + } + this.trace('Pick called'); + const finalMetadata = this.metadata.clone(); + const pickResult = this.channel.doPick(finalMetadata, this.callConfig.pickInformation); + const subchannelString = pickResult.subchannel + ? 
'(' + + pickResult.subchannel.getChannelzRef().id + + ') ' + + pickResult.subchannel.getAddress() + : '' + pickResult.subchannel; + this.trace('Pick result: ' + + picker_1.PickResultType[pickResult.pickResultType] + + ' subchannel: ' + + subchannelString + + ' status: ' + + ((_a = pickResult.status) === null || _a === void 0 ? void 0 : _a.code) + + ' ' + + ((_b = pickResult.status) === null || _b === void 0 ? void 0 : _b.details)); + switch (pickResult.pickResultType) { + case picker_1.PickResultType.COMPLETE: + this.credentials + .generateMetadata({ method_name: this.methodName, service_url: this.serviceUrl }) + .then(credsMetadata => { + var _a, _b, _c; + /* If this call was cancelled (e.g. by the deadline) before + * metadata generation finished, we shouldn't do anything with + * it. */ + if (this.ended) { + this.trace('Credentials metadata generation finished after call ended'); + return; + } + finalMetadata.merge(credsMetadata); + if (finalMetadata.get('authorization').length > 1) { + this.outputStatus({ + code: constants_1.Status.INTERNAL, + details: '"authorization" metadata cannot have multiple values', + metadata: new metadata_1.Metadata(), + }, 'PROCESSED'); + } + if (pickResult.subchannel.getConnectivityState() !== + connectivity_state_1.ConnectivityState.READY) { + this.trace('Picked subchannel ' + + subchannelString + + ' has state ' + + connectivity_state_1.ConnectivityState[pickResult.subchannel.getConnectivityState()] + + ' after getting credentials metadata. Retrying pick'); + this.doPick(); + return; + } + if (this.deadline !== Infinity) { + finalMetadata.set('grpc-timeout', (0, deadline_1.getDeadlineTimeoutString)(this.deadline)); + } + try { + this.child = pickResult + .subchannel.getRealSubchannel() + .createCall(finalMetadata, this.host, this.methodName, { + onReceiveMetadata: metadata => { + this.trace('Received metadata'); + this.listener.onReceiveMetadata(metadata); + }, + onReceiveMessage: message => { + this.trace('Received message'); + this.listener.onReceiveMessage(message); + }, + onReceiveStatus: status => { + this.trace('Received status'); + if (status.rstCode === + http2.constants.NGHTTP2_REFUSED_STREAM) { + this.outputStatus(status, 'REFUSED'); + } + else { + this.outputStatus(status, 'PROCESSED'); + } + }, + }); + this.childStartTime = new Date(); + } + catch (error) { + this.trace('Failed to start call on picked subchannel ' + + subchannelString + + ' with error ' + + error.message); + this.outputStatus({ + code: constants_1.Status.INTERNAL, + details: 'Failed to start HTTP/2 stream with error ' + + error.message, + metadata: new metadata_1.Metadata(), + }, 'NOT_STARTED'); + return; + } + (_b = (_a = this.callConfig).onCommitted) === null || _b === void 0 ? void 0 : _b.call(_a); + (_c = pickResult.onCallStarted) === null || _c === void 0 ? void 0 : _c.call(pickResult); + this.onCallEnded = pickResult.onCallEnded; + this.trace('Created child call [' + this.child.getCallNumber() + ']'); + if (this.readPending) { + this.child.startRead(); + } + if (this.pendingMessage) { + this.child.sendMessageWithContext(this.pendingMessage.context, this.pendingMessage.message); + } + if (this.pendingHalfClose) { + this.child.halfClose(); + } + }, (error) => { + // We assume the error code isn't 0 (Status.OK) + const { code, details } = (0, control_plane_status_1.restrictControlPlaneStatusCode)(typeof error.code === 'number' ? 
error.code : constants_1.Status.UNKNOWN, `Getting metadata from plugin failed with error: ${error.message}`); + this.outputStatus({ + code: code, + details: details, + metadata: new metadata_1.Metadata(), + }, 'PROCESSED'); + }); + break; + case picker_1.PickResultType.DROP: + const { code, details } = (0, control_plane_status_1.restrictControlPlaneStatusCode)(pickResult.status.code, pickResult.status.details); + setImmediate(() => { + this.outputStatus({ code, details, metadata: pickResult.status.metadata }, 'DROP'); + }); + break; + case picker_1.PickResultType.TRANSIENT_FAILURE: + if (this.metadata.getOptions().waitForReady) { + this.channel.queueCallForPick(this); + } + else { + const { code, details } = (0, control_plane_status_1.restrictControlPlaneStatusCode)(pickResult.status.code, pickResult.status.details); + setImmediate(() => { + this.outputStatus({ code, details, metadata: pickResult.status.metadata }, 'PROCESSED'); + }); + } + break; + case picker_1.PickResultType.QUEUE: + this.channel.queueCallForPick(this); + } + } + cancelWithStatus(status, details) { + var _a; + this.trace('cancelWithStatus code: ' + status + ' details: "' + details + '"'); + (_a = this.child) === null || _a === void 0 ? void 0 : _a.cancelWithStatus(status, details); + this.outputStatus({ code: status, details: details, metadata: new metadata_1.Metadata() }, 'PROCESSED'); + } + getPeer() { + var _a, _b; + return (_b = (_a = this.child) === null || _a === void 0 ? void 0 : _a.getPeer()) !== null && _b !== void 0 ? _b : this.channel.getTarget(); + } + start(metadata, listener) { + this.trace('start called'); + this.listener = listener; + this.metadata = metadata; + this.doPick(); + } + sendMessageWithContext(context, message) { + this.trace('write() called with message of length ' + message.length); + if (this.child) { + this.child.sendMessageWithContext(context, message); + } + else { + this.pendingMessage = { context, message }; + } + } + startRead() { + this.trace('startRead called'); + if (this.child) { + this.child.startRead(); + } + else { + this.readPending = true; + } + } + halfClose() { + this.trace('halfClose called'); + if (this.child) { + this.child.halfClose(); + } + else { + this.pendingHalfClose = true; + } + } + setCredentials(credentials) { + throw new Error('Method not implemented.'); + } + getCallNumber() { + return this.callNumber; + } +} +exports.LoadBalancingCall = LoadBalancingCall; +//# sourceMappingURL=load-balancing-call.js.map + +/***/ }), + +/***/ 35993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +var _a, _b, _c, _d; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isTracerEnabled = exports.trace = exports.log = exports.setLoggerVerbosity = exports.setLogger = exports.getLogger = void 0; +const constants_1 = __nccwpck_require__(90634); +const process_1 = __nccwpck_require__(77282); +const clientVersion = (__nccwpck_require__(56569)/* .version */ .i8); +const DEFAULT_LOGGER = { + error: (message, ...optionalParams) => { + console.error('E ' + message, ...optionalParams); + }, + info: (message, ...optionalParams) => { + console.error('I ' + message, ...optionalParams); + }, + debug: (message, ...optionalParams) => { + console.error('D ' + message, ...optionalParams); + }, +}; +let _logger = DEFAULT_LOGGER; +let _logVerbosity = constants_1.LogVerbosity.ERROR; +const verbosityString = (_b = (_a = process.env.GRPC_NODE_VERBOSITY) !== null && _a !== void 0 ? _a : process.env.GRPC_VERBOSITY) !== null && _b !== void 0 ? _b : ''; +switch (verbosityString.toUpperCase()) { + case 'DEBUG': + _logVerbosity = constants_1.LogVerbosity.DEBUG; + break; + case 'INFO': + _logVerbosity = constants_1.LogVerbosity.INFO; + break; + case 'ERROR': + _logVerbosity = constants_1.LogVerbosity.ERROR; + break; + case 'NONE': + _logVerbosity = constants_1.LogVerbosity.NONE; + break; + default: + // Ignore any other values +} +const getLogger = () => { + return _logger; +}; +exports.getLogger = getLogger; +const setLogger = (logger) => { + _logger = logger; +}; +exports.setLogger = setLogger; +const setLoggerVerbosity = (verbosity) => { + _logVerbosity = verbosity; +}; +exports.setLoggerVerbosity = setLoggerVerbosity; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const log = (severity, ...args) => { + let logFunction; + if (severity >= _logVerbosity) { + switch (severity) { + case constants_1.LogVerbosity.DEBUG: + logFunction = _logger.debug; + break; + case constants_1.LogVerbosity.INFO: + logFunction = _logger.info; + break; + case constants_1.LogVerbosity.ERROR: + logFunction = _logger.error; + break; + } + /* Fall back to _logger.error when other methods are not available for + * compatiblity with older behavior that always logged to _logger.error */ + if (!logFunction) { + logFunction = _logger.error; + } + if (logFunction) { + logFunction.bind(_logger)(...args); + } + } +}; +exports.log = log; +const tracersString = (_d = (_c = process.env.GRPC_NODE_TRACE) !== null && _c !== void 0 ? _c : process.env.GRPC_TRACE) !== null && _d !== void 0 ? _d : ''; +const enabledTracers = new Set(); +const disabledTracers = new Set(); +for (const tracerName of tracersString.split(',')) { + if (tracerName.startsWith('-')) { + disabledTracers.add(tracerName.substring(1)); + } + else { + enabledTracers.add(tracerName); + } +} +const allEnabled = enabledTracers.has('all'); +function trace(severity, tracer, text) { + if (isTracerEnabled(tracer)) { + (0, exports.log)(severity, new Date().toISOString() + + ' | v' + + clientVersion + + ' ' + + process_1.pid + + ' | ' + + tracer + + ' | ' + + text); + } +} +exports.trace = trace; +function isTracerEnabled(tracer) { + return (!disabledTracers.has(tracer) && (allEnabled || enabledTracers.has(tracer))); +} +exports.isTracerEnabled = isTracerEnabled; +//# sourceMappingURL=logging.js.map + +/***/ }), + +/***/ 38541: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadPackageDefinition = exports.makeClientConstructor = void 0; +const client_1 = __nccwpck_require__(87172); +/** + * Map with short names for each of the requester maker functions. Used in + * makeClientConstructor + * @private + */ +const requesterFuncs = { + unary: client_1.Client.prototype.makeUnaryRequest, + server_stream: client_1.Client.prototype.makeServerStreamRequest, + client_stream: client_1.Client.prototype.makeClientStreamRequest, + bidi: client_1.Client.prototype.makeBidiStreamRequest, +}; +/** + * Returns true, if given key is included in the blacklisted + * keys. + * @param key key for check, string. + */ +function isPrototypePolluted(key) { + return ['__proto__', 'prototype', 'constructor'].includes(key); +} +/** + * Creates a constructor for a client with the given methods, as specified in + * the methods argument. The resulting class will have an instance method for + * each method in the service, which is a partial application of one of the + * [Client]{@link grpc.Client} request methods, depending on `requestSerialize` + * and `responseSerialize`, with the `method`, `serialize`, and `deserialize` + * arguments predefined. + * @param methods An object mapping method names to + * method attributes + * @param serviceName The fully qualified name of the service + * @param classOptions An options object. + * @return New client constructor, which is a subclass of + * {@link grpc.Client}, and has the same arguments as that constructor. 
+ */ +function makeClientConstructor(methods, serviceName, classOptions) { + if (!classOptions) { + classOptions = {}; + } + class ServiceClientImpl extends client_1.Client { + } + Object.keys(methods).forEach(name => { + if (isPrototypePolluted(name)) { + return; + } + const attrs = methods[name]; + let methodType; + // TODO(murgatroid99): Verify that we don't need this anymore + if (typeof name === 'string' && name.charAt(0) === '$') { + throw new Error('Method names cannot start with $'); + } + if (attrs.requestStream) { + if (attrs.responseStream) { + methodType = 'bidi'; + } + else { + methodType = 'client_stream'; + } + } + else { + if (attrs.responseStream) { + methodType = 'server_stream'; + } + else { + methodType = 'unary'; + } + } + const serialize = attrs.requestSerialize; + const deserialize = attrs.responseDeserialize; + const methodFunc = partial(requesterFuncs[methodType], attrs.path, serialize, deserialize); + ServiceClientImpl.prototype[name] = methodFunc; + // Associate all provided attributes with the method + Object.assign(ServiceClientImpl.prototype[name], attrs); + if (attrs.originalName && !isPrototypePolluted(attrs.originalName)) { + ServiceClientImpl.prototype[attrs.originalName] = + ServiceClientImpl.prototype[name]; + } + }); + ServiceClientImpl.service = methods; + ServiceClientImpl.serviceName = serviceName; + return ServiceClientImpl; +} +exports.makeClientConstructor = makeClientConstructor; +function partial(fn, path, serialize, deserialize) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return function (...args) { + return fn.call(this, path, serialize, deserialize, ...args); + }; +} +function isProtobufTypeDefinition(obj) { + return 'format' in obj; +} +/** + * Load a gRPC package definition as a gRPC object hierarchy. + * @param packageDef The package definition object. + * @return The resulting gRPC object. + */ +function loadPackageDefinition(packageDef) { + const result = {}; + for (const serviceFqn in packageDef) { + if (Object.prototype.hasOwnProperty.call(packageDef, serviceFqn)) { + const service = packageDef[serviceFqn]; + const nameComponents = serviceFqn.split('.'); + if (nameComponents.some((comp) => isPrototypePolluted(comp))) { + continue; + } + const serviceName = nameComponents[nameComponents.length - 1]; + let current = result; + for (const packageName of nameComponents.slice(0, -1)) { + if (!current[packageName]) { + current[packageName] = {}; + } + current = current[packageName]; + } + if (isProtobufTypeDefinition(service)) { + current[serviceName] = service; + } + else { + current[serviceName] = makeClientConstructor(service, serviceName, {}); + } + } + } + return result; +} +exports.loadPackageDefinition = loadPackageDefinition; +//# sourceMappingURL=make-client.js.map + +/***/ }), + +/***/ 83665: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Metadata = void 0; +const logging_1 = __nccwpck_require__(35993); +const constants_1 = __nccwpck_require__(90634); +const error_1 = __nccwpck_require__(22336); +const LEGAL_KEY_REGEX = /^[0-9a-z_.-]+$/; +const LEGAL_NON_BINARY_VALUE_REGEX = /^[ -~]*$/; +function isLegalKey(key) { + return LEGAL_KEY_REGEX.test(key); +} +function isLegalNonBinaryValue(value) { + return LEGAL_NON_BINARY_VALUE_REGEX.test(value); +} +function isBinaryKey(key) { + return key.endsWith('-bin'); +} +function isCustomMetadata(key) { + return !key.startsWith('grpc-'); +} +function normalizeKey(key) { + return key.toLowerCase(); +} +function validate(key, value) { + if (!isLegalKey(key)) { + throw new Error('Metadata key "' + key + '" contains illegal characters'); + } + if (value !== null && value !== undefined) { + if (isBinaryKey(key)) { + if (!Buffer.isBuffer(value)) { + throw new Error("keys that end with '-bin' must have Buffer values"); + } + } + else { + if (Buffer.isBuffer(value)) { + throw new Error("keys that don't end with '-bin' must have String values"); + } + if (!isLegalNonBinaryValue(value)) { + throw new Error('Metadata string value "' + value + '" contains illegal characters'); + } + } + } +} +/** + * A class for storing metadata. Keys are normalized to lowercase ASCII. + */ +class Metadata { + constructor(options = {}) { + this.internalRepr = new Map(); + this.options = options; + } + /** + * Sets the given value for the given key by replacing any other values + * associated with that key. Normalizes the key. + * @param key The key to whose value should be set. + * @param value The value to set. Must be a buffer if and only + * if the normalized key ends with '-bin'. + */ + set(key, value) { + key = normalizeKey(key); + validate(key, value); + this.internalRepr.set(key, [value]); + } + /** + * Adds the given value for the given key by appending to a list of previous + * values associated with that key. Normalizes the key. + * @param key The key for which a new value should be appended. + * @param value The value to add. Must be a buffer if and only + * if the normalized key ends with '-bin'. + */ + add(key, value) { + key = normalizeKey(key); + validate(key, value); + const existingValue = this.internalRepr.get(key); + if (existingValue === undefined) { + this.internalRepr.set(key, [value]); + } + else { + existingValue.push(value); + } + } + /** + * Removes the given key and any associated values. Normalizes the key. + * @param key The key whose values should be removed. + */ + remove(key) { + key = normalizeKey(key); + // validate(key); + this.internalRepr.delete(key); + } + /** + * Gets a list of all values associated with the key. Normalizes the key. + * @param key The key whose value should be retrieved. + * @return A list of values associated with the given key. + */ + get(key) { + key = normalizeKey(key); + // validate(key); + return this.internalRepr.get(key) || []; + } + /** + * Gets a plain object mapping each key to the first value associated with it. + * This reflects the most common way that people will want to see metadata. + * @return A key/value mapping of the metadata. + */ + getMap() { + const result = {}; + for (const [key, values] of this.internalRepr) { + if (values.length > 0) { + const v = values[0]; + result[key] = Buffer.isBuffer(v) ? Buffer.from(v) : v; + } + } + return result; + } + /** + * Clones the metadata object. + * @return The newly cloned object. 
+ */ + clone() { + const newMetadata = new Metadata(this.options); + const newInternalRepr = newMetadata.internalRepr; + for (const [key, value] of this.internalRepr) { + const clonedValue = value.map(v => { + if (Buffer.isBuffer(v)) { + return Buffer.from(v); + } + else { + return v; + } + }); + newInternalRepr.set(key, clonedValue); + } + return newMetadata; + } + /** + * Merges all key-value pairs from a given Metadata object into this one. + * If both this object and the given object have values in the same key, + * values from the other Metadata object will be appended to this object's + * values. + * @param other A Metadata object. + */ + merge(other) { + for (const [key, values] of other.internalRepr) { + const mergedValue = (this.internalRepr.get(key) || []).concat(values); + this.internalRepr.set(key, mergedValue); + } + } + setOptions(options) { + this.options = options; + } + getOptions() { + return this.options; + } + /** + * Creates an OutgoingHttpHeaders object that can be used with the http2 API. + */ + toHttp2Headers() { + // NOTE: Node <8.9 formats http2 headers incorrectly. + const result = {}; + for (const [key, values] of this.internalRepr) { + // We assume that the user's interaction with this object is limited to + // through its public API (i.e. keys and values are already validated). + result[key] = values.map(bufToString); + } + return result; + } + /** + * This modifies the behavior of JSON.stringify to show an object + * representation of the metadata map. + */ + toJSON() { + const result = {}; + for (const [key, values] of this.internalRepr) { + result[key] = values; + } + return result; + } + /** + * Returns a new Metadata object based fields in a given IncomingHttpHeaders + * object. + * @param headers An IncomingHttpHeaders object. + */ + static fromHttp2Headers(headers) { + const result = new Metadata(); + for (const key of Object.keys(headers)) { + // Reserved headers (beginning with `:`) are not valid keys. + if (key.charAt(0) === ':') { + continue; + } + const values = headers[key]; + try { + if (isBinaryKey(key)) { + if (Array.isArray(values)) { + values.forEach(value => { + result.add(key, Buffer.from(value, 'base64')); + }); + } + else if (values !== undefined) { + if (isCustomMetadata(key)) { + values.split(',').forEach(v => { + result.add(key, Buffer.from(v.trim(), 'base64')); + }); + } + else { + result.add(key, Buffer.from(values, 'base64')); + } + } + } + else { + if (Array.isArray(values)) { + values.forEach(value => { + result.add(key, value); + }); + } + else if (values !== undefined) { + result.add(key, values); + } + } + } + catch (error) { + const message = `Failed to add metadata entry ${key}: ${values}. ${(0, error_1.getErrorMessage)(error)}. For more information see https://github.com/grpc/grpc-node/issues/1173`; + (0, logging_1.log)(constants_1.LogVerbosity.ERROR, message); + } + } + return result; + } +} +exports.Metadata = Metadata; +const bufToString = (val) => { + return Buffer.isBuffer(val) ? val.toString('base64') : val; +}; +//# sourceMappingURL=metadata.js.map + +/***/ }), + +/***/ 81611: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.QueuePicker = exports.UnavailablePicker = exports.PickResultType = void 0; +const metadata_1 = __nccwpck_require__(83665); +const constants_1 = __nccwpck_require__(90634); +var PickResultType; +(function (PickResultType) { + PickResultType[PickResultType["COMPLETE"] = 0] = "COMPLETE"; + PickResultType[PickResultType["QUEUE"] = 1] = "QUEUE"; + PickResultType[PickResultType["TRANSIENT_FAILURE"] = 2] = "TRANSIENT_FAILURE"; + PickResultType[PickResultType["DROP"] = 3] = "DROP"; +})(PickResultType || (exports.PickResultType = PickResultType = {})); +/** + * A standard picker representing a load balancer in the TRANSIENT_FAILURE + * state. Always responds to every pick request with an UNAVAILABLE status. + */ +class UnavailablePicker { + constructor(status) { + this.status = Object.assign({ code: constants_1.Status.UNAVAILABLE, details: 'No connection established', metadata: new metadata_1.Metadata() }, status); + } + pick(pickArgs) { + return { + pickResultType: PickResultType.TRANSIENT_FAILURE, + subchannel: null, + status: this.status, + onCallStarted: null, + onCallEnded: null, + }; + } +} +exports.UnavailablePicker = UnavailablePicker; +/** + * A standard picker representing a load balancer in the IDLE or CONNECTING + * state. Always responds to every pick request with a QUEUE pick result + * indicating that the pick should be tried again with the next `Picker`. Also + * reports back to the load balancer that a connection should be established + * once any pick is attempted. + * If the childPicker is provided, delegate to it instead of returning the + * hardcoded QUEUE pick result, but still calls exitIdle. + */ +class QueuePicker { + // Constructed with a load balancer. Calls exitIdle on it the first time pick is called + constructor(loadBalancer, childPicker) { + this.loadBalancer = loadBalancer; + this.childPicker = childPicker; + this.calledExitIdle = false; + } + pick(pickArgs) { + if (!this.calledExitIdle) { + process.nextTick(() => { + this.loadBalancer.exitIdle(); + }); + this.calledExitIdle = true; + } + if (this.childPicker) { + return this.childPicker.pick(pickArgs); + } + else { + return { + pickResultType: PickResultType.QUEUE, + subchannel: null, + status: null, + onCallStarted: null, + onCallEnded: null, + }; + } + } +} +exports.QueuePicker = QueuePicker; +//# sourceMappingURL=picker.js.map + +/***/ }), + +/***/ 49421: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setup = exports.DEFAULT_PORT = void 0; +const resolver_1 = __nccwpck_require__(31594); +const dns_1 = __nccwpck_require__(17578); +const service_config_1 = __nccwpck_require__(21761); +const constants_1 = __nccwpck_require__(90634); +const metadata_1 = __nccwpck_require__(83665); +const logging = __nccwpck_require__(35993); +const constants_2 = __nccwpck_require__(90634); +const uri_parser_1 = __nccwpck_require__(65974); +const net_1 = __nccwpck_require__(41808); +const backoff_timeout_1 = __nccwpck_require__(34186); +const environment_1 = __nccwpck_require__(29160); +const TRACER_NAME = 'dns_resolver'; +function trace(text) { + logging.trace(constants_2.LogVerbosity.DEBUG, TRACER_NAME, text); +} +/** + * The default TCP port to connect to if not explicitly specified in the target. + */ +exports.DEFAULT_PORT = 443; +const DEFAULT_MIN_TIME_BETWEEN_RESOLUTIONS_MS = 30000; +/** + * Resolver implementation that handles DNS names and IP addresses. + */ +class DnsResolver { + constructor(target, listener, channelOptions) { + var _a, _b, _c; + this.target = target; + this.listener = listener; + this.pendingLookupPromise = null; + this.pendingTxtPromise = null; + this.latestLookupResult = null; + this.latestServiceConfig = null; + this.latestServiceConfigError = null; + this.continueResolving = false; + this.isNextResolutionTimerRunning = false; + this.isServiceConfigEnabled = true; + this.returnedIpResult = false; + this.alternativeResolver = new dns_1.promises.Resolver(); + trace('Resolver constructed for target ' + (0, uri_parser_1.uriToString)(target)); + if (target.authority) { + this.alternativeResolver.setServers([target.authority]); + } + const hostPort = (0, uri_parser_1.splitHostPort)(target.path); + if (hostPort === null) { + this.ipResult = null; + this.dnsHostname = null; + this.port = null; + } + else { + if ((0, net_1.isIPv4)(hostPort.host) || (0, net_1.isIPv6)(hostPort.host)) { + this.ipResult = [ + { + addresses: [ + { + host: hostPort.host, + port: (_a = hostPort.port) !== null && _a !== void 0 ? _a : exports.DEFAULT_PORT, + }, + ], + }, + ]; + this.dnsHostname = null; + this.port = null; + } + else { + this.ipResult = null; + this.dnsHostname = hostPort.host; + this.port = (_b = hostPort.port) !== null && _b !== void 0 ? _b : exports.DEFAULT_PORT; + } + } + this.percentage = Math.random() * 100; + if (channelOptions['grpc.service_config_disable_resolution'] === 1) { + this.isServiceConfigEnabled = false; + } + this.defaultResolutionError = { + code: constants_1.Status.UNAVAILABLE, + details: `Name resolution failed for target ${(0, uri_parser_1.uriToString)(this.target)}`, + metadata: new metadata_1.Metadata(), + }; + const backoffOptions = { + initialDelay: channelOptions['grpc.initial_reconnect_backoff_ms'], + maxDelay: channelOptions['grpc.max_reconnect_backoff_ms'], + }; + this.backoff = new backoff_timeout_1.BackoffTimeout(() => { + if (this.continueResolving) { + this.startResolutionWithBackoff(); + } + }, backoffOptions); + this.backoff.unref(); + this.minTimeBetweenResolutionsMs = + (_c = channelOptions['grpc.dns_min_time_between_resolutions_ms']) !== null && _c !== void 0 ? 
_c : DEFAULT_MIN_TIME_BETWEEN_RESOLUTIONS_MS; + this.nextResolutionTimer = setTimeout(() => { }, 0); + clearTimeout(this.nextResolutionTimer); + } + /** + * If the target is an IP address, just provide that address as a result. + * Otherwise, initiate A, AAAA, and TXT lookups + */ + startResolution() { + if (this.ipResult !== null) { + if (!this.returnedIpResult) { + trace('Returning IP address for target ' + (0, uri_parser_1.uriToString)(this.target)); + setImmediate(() => { + this.listener.onSuccessfulResolution(this.ipResult, null, null, null, {}); + }); + this.returnedIpResult = true; + } + this.backoff.stop(); + this.backoff.reset(); + this.stopNextResolutionTimer(); + return; + } + if (this.dnsHostname === null) { + trace('Failed to parse DNS address ' + (0, uri_parser_1.uriToString)(this.target)); + setImmediate(() => { + this.listener.onError({ + code: constants_1.Status.UNAVAILABLE, + details: `Failed to parse DNS address ${(0, uri_parser_1.uriToString)(this.target)}`, + metadata: new metadata_1.Metadata(), + }); + }); + this.stopNextResolutionTimer(); + } + else { + if (this.pendingLookupPromise !== null) { + return; + } + trace('Looking up DNS hostname ' + this.dnsHostname); + /* We clear out latestLookupResult here to ensure that it contains the + * latest result since the last time we started resolving. That way, the + * TXT resolution handler can use it, but only if it finishes second. We + * don't clear out any previous service config results because it's + * better to use a service config that's slightly out of date than to + * revert to an effectively blank one. */ + this.latestLookupResult = null; + const hostname = this.dnsHostname; + this.pendingLookupPromise = this.lookup(hostname); + this.pendingLookupPromise.then(addressList => { + if (this.pendingLookupPromise === null) { + return; + } + this.pendingLookupPromise = null; + this.backoff.reset(); + this.backoff.stop(); + this.latestLookupResult = addressList.map(address => ({ + addresses: [address], + })); + const allAddressesString = '[' + + addressList.map(addr => addr.host + ':' + addr.port).join(',') + + ']'; + trace('Resolved addresses for target ' + + (0, uri_parser_1.uriToString)(this.target) + + ': ' + + allAddressesString); + if (this.latestLookupResult.length === 0) { + this.listener.onError(this.defaultResolutionError); + return; + } + /* If the TXT lookup has not yet finished, both of the last two + * arguments will be null, which is the equivalent of getting an + * empty TXT response. 
When the TXT lookup does finish, its handler + * can update the service config by using the same address list */ + this.listener.onSuccessfulResolution(this.latestLookupResult, this.latestServiceConfig, this.latestServiceConfigError, null, {}); + }, err => { + if (this.pendingLookupPromise === null) { + return; + } + trace('Resolution error for target ' + + (0, uri_parser_1.uriToString)(this.target) + + ': ' + + err.message); + this.pendingLookupPromise = null; + this.stopNextResolutionTimer(); + this.listener.onError(this.defaultResolutionError); + }); + /* If there already is a still-pending TXT resolution, we can just use + * that result when it comes in */ + if (this.isServiceConfigEnabled && this.pendingTxtPromise === null) { + /* We handle the TXT query promise differently than the others because + * the name resolution attempt as a whole is a success even if the TXT + * lookup fails */ + this.pendingTxtPromise = this.resolveTxt(hostname); + this.pendingTxtPromise.then(txtRecord => { + if (this.pendingTxtPromise === null) { + return; + } + this.pendingTxtPromise = null; + try { + this.latestServiceConfig = (0, service_config_1.extractAndSelectServiceConfig)(txtRecord, this.percentage); + } + catch (err) { + this.latestServiceConfigError = { + code: constants_1.Status.UNAVAILABLE, + details: `Parsing service config failed with error ${err.message}`, + metadata: new metadata_1.Metadata(), + }; + } + if (this.latestLookupResult !== null) { + /* We rely here on the assumption that calling this function with + * identical parameters will be essentialy idempotent, and calling + * it with the same address list and a different service config + * should result in a fast and seamless switchover. */ + this.listener.onSuccessfulResolution(this.latestLookupResult, this.latestServiceConfig, this.latestServiceConfigError, null, {}); + } + }, err => { + /* If TXT lookup fails we should do nothing, which means that we + * continue to use the result of the most recent successful lookup, + * or the default null config object if there has never been a + * successful lookup. We do not set the latestServiceConfigError + * here because that is specifically used for response validation + * errors. We still need to handle this error so that it does not + * bubble up as an unhandled promise rejection. */ + }); + } + } + } + async lookup(hostname) { + if (environment_1.GRPC_NODE_USE_ALTERNATIVE_RESOLVER) { + trace('Using alternative DNS resolver.'); + const records = await Promise.allSettled([ + this.alternativeResolver.resolve4(hostname), + this.alternativeResolver.resolve6(hostname), + ]); + if (records.every(result => result.status === 'rejected')) { + throw new Error(records[0].reason); + } + return records + .reduce((acc, result) => { + return result.status === 'fulfilled' + ? 
[...acc, ...result.value] + : acc; + }, []) + .map(addr => ({ + host: addr, + port: +this.port, + })); + } + /* We lookup both address families here and then split them up later + * because when looking up a single family, dns.lookup outputs an error + * if the name exists but there are no records for that family, and that + * error is indistinguishable from other kinds of errors */ + const addressList = await dns_1.promises.lookup(hostname, { all: true }); + return addressList.map(addr => ({ host: addr.address, port: +this.port })); + } + async resolveTxt(hostname) { + if (environment_1.GRPC_NODE_USE_ALTERNATIVE_RESOLVER) { + trace('Using alternative DNS resolver.'); + return this.alternativeResolver.resolveTxt(hostname); + } + return dns_1.promises.resolveTxt(hostname); + } + startNextResolutionTimer() { + var _a, _b; + clearTimeout(this.nextResolutionTimer); + this.nextResolutionTimer = setTimeout(() => { + this.stopNextResolutionTimer(); + if (this.continueResolving) { + this.startResolutionWithBackoff(); + } + }, this.minTimeBetweenResolutionsMs); + (_b = (_a = this.nextResolutionTimer).unref) === null || _b === void 0 ? void 0 : _b.call(_a); + this.isNextResolutionTimerRunning = true; + } + stopNextResolutionTimer() { + clearTimeout(this.nextResolutionTimer); + this.isNextResolutionTimerRunning = false; + } + startResolutionWithBackoff() { + if (this.pendingLookupPromise === null) { + this.continueResolving = false; + this.backoff.runOnce(); + this.startNextResolutionTimer(); + this.startResolution(); + } + } + updateResolution() { + /* If there is a pending lookup, just let it finish. Otherwise, if the + * nextResolutionTimer or backoff timer is running, set the + * continueResolving flag to resolve when whichever of those timers + * fires. Otherwise, start resolving immediately. */ + if (this.pendingLookupPromise === null) { + if (this.isNextResolutionTimerRunning || this.backoff.isRunning()) { + if (this.isNextResolutionTimerRunning) { + trace('resolution update delayed by "min time between resolutions" rate limit'); + } + else { + trace('resolution update delayed by backoff timer until ' + + this.backoff.getEndTime().toISOString()); + } + this.continueResolving = true; + } + else { + this.startResolutionWithBackoff(); + } + } + } + /** + * Reset the resolver to the same state it had when it was created. In-flight + * DNS requests cannot be cancelled, but they are discarded and their results + * will be ignored. + */ + destroy() { + this.continueResolving = false; + this.backoff.reset(); + this.backoff.stop(); + this.stopNextResolutionTimer(); + this.pendingLookupPromise = null; + this.pendingTxtPromise = null; + this.latestLookupResult = null; + this.latestServiceConfig = null; + this.latestServiceConfigError = null; + this.returnedIpResult = false; + } + /** + * Get the default authority for the given target. For IP targets, that is + * the IP address. For DNS targets, it is the hostname. + * @param target + */ + static getDefaultAuthority(target) { + return target.path; + } +} +/** + * Set up the DNS resolver class by registering it as the handler for the + * "dns:" prefix and as the default resolver. + */ +function setup() { + (0, resolver_1.registerResolver)('dns', DnsResolver); + (0, resolver_1.registerDefaultScheme)('dns'); +} +exports.setup = setup; +//# sourceMappingURL=resolver-dns.js.map + +/***/ }), + +/***/ 97902: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2021 gRPC authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setup = void 0; +const net_1 = __nccwpck_require__(41808); +const constants_1 = __nccwpck_require__(90634); +const metadata_1 = __nccwpck_require__(83665); +const resolver_1 = __nccwpck_require__(31594); +const uri_parser_1 = __nccwpck_require__(65974); +const logging = __nccwpck_require__(35993); +const TRACER_NAME = 'ip_resolver'; +function trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, text); +} +const IPV4_SCHEME = 'ipv4'; +const IPV6_SCHEME = 'ipv6'; +/** + * The default TCP port to connect to if not explicitly specified in the target. + */ +const DEFAULT_PORT = 443; +class IpResolver { + constructor(target, listener, channelOptions) { + var _a; + this.listener = listener; + this.endpoints = []; + this.error = null; + this.hasReturnedResult = false; + trace('Resolver constructed for target ' + (0, uri_parser_1.uriToString)(target)); + const addresses = []; + if (!(target.scheme === IPV4_SCHEME || target.scheme === IPV6_SCHEME)) { + this.error = { + code: constants_1.Status.UNAVAILABLE, + details: `Unrecognized scheme ${target.scheme} in IP resolver`, + metadata: new metadata_1.Metadata(), + }; + return; + } + const pathList = target.path.split(','); + for (const path of pathList) { + const hostPort = (0, uri_parser_1.splitHostPort)(path); + if (hostPort === null) { + this.error = { + code: constants_1.Status.UNAVAILABLE, + details: `Failed to parse ${target.scheme} address ${path}`, + metadata: new metadata_1.Metadata(), + }; + return; + } + if ((target.scheme === IPV4_SCHEME && !(0, net_1.isIPv4)(hostPort.host)) || + (target.scheme === IPV6_SCHEME && !(0, net_1.isIPv6)(hostPort.host))) { + this.error = { + code: constants_1.Status.UNAVAILABLE, + details: `Failed to parse ${target.scheme} address ${path}`, + metadata: new metadata_1.Metadata(), + }; + return; + } + addresses.push({ + host: hostPort.host, + port: (_a = hostPort.port) !== null && _a !== void 0 ? _a : DEFAULT_PORT, + }); + } + this.endpoints = addresses.map(address => ({ addresses: [address] })); + trace('Parsed ' + target.scheme + ' address list ' + addresses); + } + updateResolution() { + if (!this.hasReturnedResult) { + this.hasReturnedResult = true; + process.nextTick(() => { + if (this.error) { + this.listener.onError(this.error); + } + else { + this.listener.onSuccessfulResolution(this.endpoints, null, null, null, {}); + } + }); + } + } + destroy() { + this.hasReturnedResult = false; + } + static getDefaultAuthority(target) { + return target.path.split(',')[0]; + } +} +function setup() { + (0, resolver_1.registerResolver)(IPV4_SCHEME, IpResolver); + (0, resolver_1.registerResolver)(IPV6_SCHEME, IpResolver); +} +exports.setup = setup; +//# sourceMappingURL=resolver-ip.js.map + +/***/ }), + +/***/ 5252: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setup = void 0; +const resolver_1 = __nccwpck_require__(31594); +class UdsResolver { + constructor(target, listener, channelOptions) { + this.listener = listener; + this.hasReturnedResult = false; + this.endpoints = []; + let path; + if (target.authority === '') { + path = '/' + target.path; + } + else { + path = target.path; + } + this.endpoints = [{ addresses: [{ path }] }]; + } + updateResolution() { + if (!this.hasReturnedResult) { + this.hasReturnedResult = true; + process.nextTick(this.listener.onSuccessfulResolution, this.endpoints, null, null, null, {}); + } + } + destroy() { + this.hasReturnedResult = false; + } + static getDefaultAuthority(target) { + return 'localhost'; + } +} +function setup() { + (0, resolver_1.registerResolver)('unix', UdsResolver); +} +exports.setup = setup; +//# sourceMappingURL=resolver-uds.js.map + +/***/ }), + +/***/ 31594: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.mapUriDefaultScheme = exports.getDefaultAuthority = exports.createResolver = exports.registerDefaultScheme = exports.registerResolver = void 0; +const uri_parser_1 = __nccwpck_require__(65974); +const registeredResolvers = {}; +let defaultScheme = null; +/** + * Register a resolver class to handle target names prefixed with the `prefix` + * string. This prefix should correspond to a URI scheme name listed in the + * [gRPC Name Resolution document](https://github.com/grpc/grpc/blob/master/doc/naming.md) + * @param prefix + * @param resolverClass + */ +function registerResolver(scheme, resolverClass) { + registeredResolvers[scheme] = resolverClass; +} +exports.registerResolver = registerResolver; +/** + * Register a default resolver to handle target names that do not start with + * any registered prefix. + * @param resolverClass + */ +function registerDefaultScheme(scheme) { + defaultScheme = scheme; +} +exports.registerDefaultScheme = registerDefaultScheme; +/** + * Create a name resolver for the specified target, if possible. Throws an + * error if no such name resolver can be created. 
+ * @param target + * @param listener + */ +function createResolver(target, listener, options) { + if (target.scheme !== undefined && target.scheme in registeredResolvers) { + return new registeredResolvers[target.scheme](target, listener, options); + } + else { + throw new Error(`No resolver could be created for target ${(0, uri_parser_1.uriToString)(target)}`); + } +} +exports.createResolver = createResolver; +/** + * Get the default authority for the specified target, if possible. Throws an + * error if no registered name resolver can parse that target string. + * @param target + */ +function getDefaultAuthority(target) { + if (target.scheme !== undefined && target.scheme in registeredResolvers) { + return registeredResolvers[target.scheme].getDefaultAuthority(target); + } + else { + throw new Error(`Invalid target ${(0, uri_parser_1.uriToString)(target)}`); + } +} +exports.getDefaultAuthority = getDefaultAuthority; +function mapUriDefaultScheme(target) { + if (target.scheme === undefined || !(target.scheme in registeredResolvers)) { + if (defaultScheme !== null) { + return { + scheme: defaultScheme, + authority: undefined, + path: (0, uri_parser_1.uriToString)(target), + }; + } + else { + return null; + } + } + return target; +} +exports.mapUriDefaultScheme = mapUriDefaultScheme; +//# sourceMappingURL=resolver.js.map + +/***/ }), + +/***/ 39909: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResolvingCall = void 0; +const constants_1 = __nccwpck_require__(90634); +const deadline_1 = __nccwpck_require__(511); +const metadata_1 = __nccwpck_require__(83665); +const logging = __nccwpck_require__(35993); +const control_plane_status_1 = __nccwpck_require__(39129); +const TRACER_NAME = 'resolving_call'; +class ResolvingCall { + constructor(channel, method, options, filterStackFactory, credentials, callNumber) { + this.channel = channel; + this.method = method; + this.filterStackFactory = filterStackFactory; + this.credentials = credentials; + this.callNumber = callNumber; + this.child = null; + this.readPending = false; + this.pendingMessage = null; + this.pendingHalfClose = false; + this.ended = false; + this.readFilterPending = false; + this.writeFilterPending = false; + this.pendingChildStatus = null; + this.metadata = null; + this.listener = null; + this.statusWatchers = []; + this.deadlineTimer = setTimeout(() => { }, 0); + this.filterStack = null; + this.deadlineStartTime = null; + this.configReceivedTime = null; + this.childStartTime = null; + this.deadline = options.deadline; + this.host = options.host; + if (options.parentCall) { + if (options.flags & constants_1.Propagate.CANCELLATION) { + options.parentCall.on('cancelled', () => { + this.cancelWithStatus(constants_1.Status.CANCELLED, 'Cancelled by parent call'); + }); + } + if (options.flags & constants_1.Propagate.DEADLINE) { + this.trace('Propagating deadline from parent: ' + + options.parentCall.getDeadline()); + this.deadline = (0, deadline_1.minDeadline)(this.deadline, options.parentCall.getDeadline()); + } + } + this.trace('Created'); + this.runDeadlineTimer(); + } + trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, '[' + this.callNumber + '] ' + text); + } + runDeadlineTimer() { + clearTimeout(this.deadlineTimer); + this.deadlineStartTime = new Date(); + this.trace('Deadline: ' + (0, deadline_1.deadlineToString)(this.deadline)); + const timeout = (0, deadline_1.getRelativeTimeout)(this.deadline); + if (timeout !== Infinity) { + this.trace('Deadline will be reached in ' + timeout + 'ms'); + const handleDeadline = () => { + if (!this.deadlineStartTime) { + this.cancelWithStatus(constants_1.Status.DEADLINE_EXCEEDED, 'Deadline exceeded'); + return; + } + const deadlineInfo = []; + const deadlineEndTime = new Date(); + deadlineInfo.push(`Deadline exceeded after ${(0, deadline_1.formatDateDifference)(this.deadlineStartTime, deadlineEndTime)}`); + if (this.configReceivedTime) { + if (this.configReceivedTime > this.deadlineStartTime) { + deadlineInfo.push(`name resolution: ${(0, deadline_1.formatDateDifference)(this.deadlineStartTime, this.configReceivedTime)}`); + } + if (this.childStartTime) { + if (this.childStartTime > this.configReceivedTime) { + deadlineInfo.push(`metadata filters: ${(0, deadline_1.formatDateDifference)(this.configReceivedTime, this.childStartTime)}`); + } + } + else { + deadlineInfo.push('waiting for metadata filters'); + } + } + else { + deadlineInfo.push('waiting for name resolution'); + } + if (this.child) { + deadlineInfo.push(...this.child.getDeadlineInfo()); + } + this.cancelWithStatus(constants_1.Status.DEADLINE_EXCEEDED, deadlineInfo.join(',')); + }; + if (timeout <= 0) { + process.nextTick(handleDeadline); + } + else { + this.deadlineTimer = setTimeout(handleDeadline, timeout); + } + } + } + outputStatus(status) { + if (!this.ended) { + this.ended = true; + if (!this.filterStack) { + 
this.filterStack = this.filterStackFactory.createFilter(); + } + clearTimeout(this.deadlineTimer); + const filteredStatus = this.filterStack.receiveTrailers(status); + this.trace('ended with status: code=' + + filteredStatus.code + + ' details="' + + filteredStatus.details + + '"'); + this.statusWatchers.forEach(watcher => watcher(filteredStatus)); + process.nextTick(() => { + var _a; + (_a = this.listener) === null || _a === void 0 ? void 0 : _a.onReceiveStatus(filteredStatus); + }); + } + } + sendMessageOnChild(context, message) { + if (!this.child) { + throw new Error('sendMessageonChild called with child not populated'); + } + const child = this.child; + this.writeFilterPending = true; + this.filterStack.sendMessage(Promise.resolve({ message: message, flags: context.flags })).then(filteredMessage => { + this.writeFilterPending = false; + child.sendMessageWithContext(context, filteredMessage.message); + if (this.pendingHalfClose) { + child.halfClose(); + } + }, (status) => { + this.cancelWithStatus(status.code, status.details); + }); + } + getConfig() { + if (this.ended) { + return; + } + if (!this.metadata || !this.listener) { + throw new Error('getConfig called before start'); + } + const configResult = this.channel.getConfig(this.method, this.metadata); + if (configResult.type === 'NONE') { + this.channel.queueCallForConfig(this); + return; + } + else if (configResult.type === 'ERROR') { + if (this.metadata.getOptions().waitForReady) { + this.channel.queueCallForConfig(this); + } + else { + this.outputStatus(configResult.error); + } + return; + } + // configResult.type === 'SUCCESS' + this.configReceivedTime = new Date(); + const config = configResult.config; + if (config.status !== constants_1.Status.OK) { + const { code, details } = (0, control_plane_status_1.restrictControlPlaneStatusCode)(config.status, 'Failed to route call to method ' + this.method); + this.outputStatus({ + code: code, + details: details, + metadata: new metadata_1.Metadata(), + }); + return; + } + if (config.methodConfig.timeout) { + const configDeadline = new Date(); + configDeadline.setSeconds(configDeadline.getSeconds() + config.methodConfig.timeout.seconds); + configDeadline.setMilliseconds(configDeadline.getMilliseconds() + + config.methodConfig.timeout.nanos / 1000000); + this.deadline = (0, deadline_1.minDeadline)(this.deadline, configDeadline); + this.runDeadlineTimer(); + } + this.filterStackFactory.push(config.dynamicFilterFactories); + this.filterStack = this.filterStackFactory.createFilter(); + this.filterStack.sendMetadata(Promise.resolve(this.metadata)).then(filteredMetadata => { + this.child = this.channel.createInnerCall(config, this.method, this.host, this.credentials, this.deadline); + this.trace('Created child [' + this.child.getCallNumber() + ']'); + this.childStartTime = new Date(); + this.child.start(filteredMetadata, { + onReceiveMetadata: metadata => { + this.trace('Received metadata'); + this.listener.onReceiveMetadata(this.filterStack.receiveMetadata(metadata)); + }, + onReceiveMessage: message => { + this.trace('Received message'); + this.readFilterPending = true; + this.filterStack.receiveMessage(message).then(filteredMesssage => { + this.trace('Finished filtering received message'); + this.readFilterPending = false; + this.listener.onReceiveMessage(filteredMesssage); + if (this.pendingChildStatus) { + this.outputStatus(this.pendingChildStatus); + } + }, (status) => { + this.cancelWithStatus(status.code, status.details); + }); + }, + onReceiveStatus: status => { + 
this.trace('Received status'); + if (this.readFilterPending) { + this.pendingChildStatus = status; + } + else { + this.outputStatus(status); + } + }, + }); + if (this.readPending) { + this.child.startRead(); + } + if (this.pendingMessage) { + this.sendMessageOnChild(this.pendingMessage.context, this.pendingMessage.message); + } + else if (this.pendingHalfClose) { + this.child.halfClose(); + } + }, (status) => { + this.outputStatus(status); + }); + } + reportResolverError(status) { + var _a; + if ((_a = this.metadata) === null || _a === void 0 ? void 0 : _a.getOptions().waitForReady) { + this.channel.queueCallForConfig(this); + } + else { + this.outputStatus(status); + } + } + cancelWithStatus(status, details) { + var _a; + this.trace('cancelWithStatus code: ' + status + ' details: "' + details + '"'); + (_a = this.child) === null || _a === void 0 ? void 0 : _a.cancelWithStatus(status, details); + this.outputStatus({ + code: status, + details: details, + metadata: new metadata_1.Metadata(), + }); + } + getPeer() { + var _a, _b; + return (_b = (_a = this.child) === null || _a === void 0 ? void 0 : _a.getPeer()) !== null && _b !== void 0 ? _b : this.channel.getTarget(); + } + start(metadata, listener) { + this.trace('start called'); + this.metadata = metadata.clone(); + this.listener = listener; + this.getConfig(); + } + sendMessageWithContext(context, message) { + this.trace('write() called with message of length ' + message.length); + if (this.child) { + this.sendMessageOnChild(context, message); + } + else { + this.pendingMessage = { context, message }; + } + } + startRead() { + this.trace('startRead called'); + if (this.child) { + this.child.startRead(); + } + else { + this.readPending = true; + } + } + halfClose() { + this.trace('halfClose called'); + if (this.child && !this.writeFilterPending) { + this.child.halfClose(); + } + else { + this.pendingHalfClose = true; + } + } + setCredentials(credentials) { + this.credentials = this.credentials.compose(credentials); + } + addStatusWatcher(watcher) { + this.statusWatchers.push(watcher); + } + getCallNumber() { + return this.callNumber; + } +} +exports.ResolvingCall = ResolvingCall; +//# sourceMappingURL=resolving-call.js.map + +/***/ }), + +/***/ 19192: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResolvingLoadBalancer = void 0; +const load_balancer_1 = __nccwpck_require__(52680); +const service_config_1 = __nccwpck_require__(21761); +const connectivity_state_1 = __nccwpck_require__(80878); +const resolver_1 = __nccwpck_require__(31594); +const picker_1 = __nccwpck_require__(81611); +const backoff_timeout_1 = __nccwpck_require__(34186); +const constants_1 = __nccwpck_require__(90634); +const metadata_1 = __nccwpck_require__(83665); +const logging = __nccwpck_require__(35993); +const constants_2 = __nccwpck_require__(90634); +const uri_parser_1 = __nccwpck_require__(65974); +const load_balancer_child_handler_1 = __nccwpck_require__(17559); +const TRACER_NAME = 'resolving_load_balancer'; +function trace(text) { + logging.trace(constants_2.LogVerbosity.DEBUG, TRACER_NAME, text); +} +/** + * Name match levels in order from most to least specific. This is the order in + * which searches will be performed. + */ +const NAME_MATCH_LEVEL_ORDER = [ + 'SERVICE_AND_METHOD', + 'SERVICE', + 'EMPTY', +]; +function hasMatchingName(service, method, methodConfig, matchLevel) { + for (const name of methodConfig.name) { + switch (matchLevel) { + case 'EMPTY': + if (!name.service && !name.method) { + return true; + } + break; + case 'SERVICE': + if (name.service === service && !name.method) { + return true; + } + break; + case 'SERVICE_AND_METHOD': + if (name.service === service && name.method === method) { + return true; + } + } + } + return false; +} +function findMatchingConfig(service, method, methodConfigs, matchLevel) { + for (const config of methodConfigs) { + if (hasMatchingName(service, method, config, matchLevel)) { + return config; + } + } + return null; +} +function getDefaultConfigSelector(serviceConfig) { + return function defaultConfigSelector(methodName, metadata) { + var _a, _b; + const splitName = methodName.split('/').filter(x => x.length > 0); + const service = (_a = splitName[0]) !== null && _a !== void 0 ? _a : ''; + const method = (_b = splitName[1]) !== null && _b !== void 0 ? _b : ''; + if (serviceConfig && serviceConfig.methodConfig) { + /* Check for the following in order, and return the first method + * config that matches: + * 1. A name that exactly matches the service and method + * 2. A name with no method set that matches the service + * 3. An empty name + */ + for (const matchLevel of NAME_MATCH_LEVEL_ORDER) { + const matchingConfig = findMatchingConfig(service, method, serviceConfig.methodConfig, matchLevel); + if (matchingConfig) { + return { + methodConfig: matchingConfig, + pickInformation: {}, + status: constants_1.Status.OK, + dynamicFilterFactories: [], + }; + } + } + } + return { + methodConfig: { name: [] }, + pickInformation: {}, + status: constants_1.Status.OK, + dynamicFilterFactories: [], + }; + }; +} +class ResolvingLoadBalancer { + /** + * Wrapper class that behaves like a `LoadBalancer` and also handles name + * resolution internally. + * @param target The address of the backend to connect to. + * @param channelControlHelper `ChannelControlHelper` instance provided by + * this load balancer's owner. + * @param defaultServiceConfig The default service configuration to be used + * if none is provided by the name resolver. A `null` value indicates + * that the default behavior should be the default unconfigured behavior. 
+ * In practice, that means using the "pick first" load balancer + * implmentation + */ + constructor(target, channelControlHelper, credentials, channelOptions, onSuccessfulResolution, onFailedResolution) { + this.target = target; + this.channelControlHelper = channelControlHelper; + this.onSuccessfulResolution = onSuccessfulResolution; + this.onFailedResolution = onFailedResolution; + this.latestChildState = connectivity_state_1.ConnectivityState.IDLE; + this.latestChildPicker = new picker_1.QueuePicker(this); + /** + * This resolving load balancer's current connectivity state. + */ + this.currentState = connectivity_state_1.ConnectivityState.IDLE; + /** + * The service config object from the last successful resolution, if + * available. A value of null indicates that we have not yet received a valid + * service config from the resolver. + */ + this.previousServiceConfig = null; + /** + * Indicates whether we should attempt to resolve again after the backoff + * timer runs out. + */ + this.continueResolving = false; + if (channelOptions['grpc.service_config']) { + this.defaultServiceConfig = (0, service_config_1.validateServiceConfig)(JSON.parse(channelOptions['grpc.service_config'])); + } + else { + this.defaultServiceConfig = { + loadBalancingConfig: [], + methodConfig: [], + }; + } + this.updateState(connectivity_state_1.ConnectivityState.IDLE, new picker_1.QueuePicker(this)); + this.childLoadBalancer = new load_balancer_child_handler_1.ChildLoadBalancerHandler({ + createSubchannel: channelControlHelper.createSubchannel.bind(channelControlHelper), + requestReresolution: () => { + /* If the backoffTimeout is running, we're still backing off from + * making resolve requests, so we shouldn't make another one here. + * In that case, the backoff timer callback will call + * updateResolution */ + if (this.backoffTimeout.isRunning()) { + trace('requestReresolution delayed by backoff timer until ' + + this.backoffTimeout.getEndTime().toISOString()); + this.continueResolving = true; + } + else { + this.updateResolution(); + } + }, + updateState: (newState, picker) => { + this.latestChildState = newState; + this.latestChildPicker = picker; + this.updateState(newState, picker); + }, + addChannelzChild: channelControlHelper.addChannelzChild.bind(channelControlHelper), + removeChannelzChild: channelControlHelper.removeChannelzChild.bind(channelControlHelper), + }, credentials, channelOptions); + this.innerResolver = (0, resolver_1.createResolver)(target, { + onSuccessfulResolution: (endpointList, serviceConfig, serviceConfigError, configSelector, attributes) => { + var _a; + this.backoffTimeout.stop(); + this.backoffTimeout.reset(); + let workingServiceConfig = null; + /* This first group of conditionals implements the algorithm described + * in https://github.com/grpc/proposal/blob/master/A21-service-config-error-handling.md + * in the section called "Behavior on receiving a new gRPC Config". 
+ */ + if (serviceConfig === null) { + // Step 4 and 5 + if (serviceConfigError === null) { + // Step 5 + this.previousServiceConfig = null; + workingServiceConfig = this.defaultServiceConfig; + } + else { + // Step 4 + if (this.previousServiceConfig === null) { + // Step 4.ii + this.handleResolutionFailure(serviceConfigError); + } + else { + // Step 4.i + workingServiceConfig = this.previousServiceConfig; + } + } + } + else { + // Step 3 + workingServiceConfig = serviceConfig; + this.previousServiceConfig = serviceConfig; + } + const workingConfigList = (_a = workingServiceConfig === null || workingServiceConfig === void 0 ? void 0 : workingServiceConfig.loadBalancingConfig) !== null && _a !== void 0 ? _a : []; + const loadBalancingConfig = (0, load_balancer_1.selectLbConfigFromList)(workingConfigList, true); + if (loadBalancingConfig === null) { + // There were load balancing configs but none are supported. This counts as a resolution failure + this.handleResolutionFailure({ + code: constants_1.Status.UNAVAILABLE, + details: 'All load balancer options in service config are not compatible', + metadata: new metadata_1.Metadata(), + }); + return; + } + this.childLoadBalancer.updateAddressList(endpointList, loadBalancingConfig, attributes); + const finalServiceConfig = workingServiceConfig !== null && workingServiceConfig !== void 0 ? workingServiceConfig : this.defaultServiceConfig; + this.onSuccessfulResolution(finalServiceConfig, configSelector !== null && configSelector !== void 0 ? configSelector : getDefaultConfigSelector(finalServiceConfig)); + }, + onError: (error) => { + this.handleResolutionFailure(error); + }, + }, channelOptions); + const backoffOptions = { + initialDelay: channelOptions['grpc.initial_reconnect_backoff_ms'], + maxDelay: channelOptions['grpc.max_reconnect_backoff_ms'], + }; + this.backoffTimeout = new backoff_timeout_1.BackoffTimeout(() => { + if (this.continueResolving) { + this.updateResolution(); + this.continueResolving = false; + } + else { + this.updateState(this.latestChildState, this.latestChildPicker); + } + }, backoffOptions); + this.backoffTimeout.unref(); + } + updateResolution() { + this.innerResolver.updateResolution(); + if (this.currentState === connectivity_state_1.ConnectivityState.IDLE) { + /* this.latestChildPicker is initialized as new QueuePicker(this), which + * is an appropriate value here if the child LB policy is unset. + * Otherwise, we want to delegate to the child here, in case that + * triggers something. 
*/ + this.updateState(connectivity_state_1.ConnectivityState.CONNECTING, this.latestChildPicker); + } + this.backoffTimeout.runOnce(); + } + updateState(connectivityState, picker) { + trace((0, uri_parser_1.uriToString)(this.target) + + ' ' + + connectivity_state_1.ConnectivityState[this.currentState] + + ' -> ' + + connectivity_state_1.ConnectivityState[connectivityState]); + // Ensure that this.exitIdle() is called by the picker + if (connectivityState === connectivity_state_1.ConnectivityState.IDLE) { + picker = new picker_1.QueuePicker(this, picker); + } + this.currentState = connectivityState; + this.channelControlHelper.updateState(connectivityState, picker); + } + handleResolutionFailure(error) { + if (this.latestChildState === connectivity_state_1.ConnectivityState.IDLE) { + this.updateState(connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE, new picker_1.UnavailablePicker(error)); + this.onFailedResolution(error); + } + } + exitIdle() { + if (this.currentState === connectivity_state_1.ConnectivityState.IDLE || + this.currentState === connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE) { + if (this.backoffTimeout.isRunning()) { + this.continueResolving = true; + } + else { + this.updateResolution(); + } + } + this.childLoadBalancer.exitIdle(); + } + updateAddressList(endpointList, lbConfig) { + throw new Error('updateAddressList not supported on ResolvingLoadBalancer'); + } + resetBackoff() { + this.backoffTimeout.reset(); + this.childLoadBalancer.resetBackoff(); + } + destroy() { + this.childLoadBalancer.destroy(); + this.innerResolver.destroy(); + this.backoffTimeout.reset(); + this.backoffTimeout.stop(); + this.latestChildState = connectivity_state_1.ConnectivityState.IDLE; + this.latestChildPicker = new picker_1.QueuePicker(this); + this.currentState = connectivity_state_1.ConnectivityState.IDLE; + this.previousServiceConfig = null; + this.continueResolving = false; + } + getTypeName() { + return 'resolving_load_balancer'; + } +} +exports.ResolvingLoadBalancer = ResolvingLoadBalancer; +//# sourceMappingURL=resolving-load-balancer.js.map + +/***/ }), + +/***/ 48159: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RetryingCall = exports.MessageBufferTracker = exports.RetryThrottler = void 0; +const constants_1 = __nccwpck_require__(90634); +const deadline_1 = __nccwpck_require__(511); +const metadata_1 = __nccwpck_require__(83665); +const logging = __nccwpck_require__(35993); +const TRACER_NAME = 'retrying_call'; +class RetryThrottler { + constructor(maxTokens, tokenRatio, previousRetryThrottler) { + this.maxTokens = maxTokens; + this.tokenRatio = tokenRatio; + if (previousRetryThrottler) { + /* When carrying over tokens from a previous config, rescale them to the + * new max value */ + this.tokens = + previousRetryThrottler.tokens * + (maxTokens / previousRetryThrottler.maxTokens); + } + else { + this.tokens = maxTokens; + } + } + addCallSucceeded() { + this.tokens = Math.min(this.tokens + this.tokenRatio, this.maxTokens); + } + addCallFailed() { + this.tokens = Math.max(this.tokens - 1, 0); + } + canRetryCall() { + return this.tokens > this.maxTokens / 2; + } +} +exports.RetryThrottler = RetryThrottler; +class MessageBufferTracker { + constructor(totalLimit, limitPerCall) { + this.totalLimit = totalLimit; + this.limitPerCall = limitPerCall; + this.totalAllocated = 0; + this.allocatedPerCall = new Map(); + } + allocate(size, callId) { + var _a; + const currentPerCall = (_a = this.allocatedPerCall.get(callId)) !== null && _a !== void 0 ? _a : 0; + if (this.limitPerCall - currentPerCall < size || + this.totalLimit - this.totalAllocated < size) { + return false; + } + this.allocatedPerCall.set(callId, currentPerCall + size); + this.totalAllocated += size; + return true; + } + free(size, callId) { + var _a; + if (this.totalAllocated < size) { + throw new Error(`Invalid buffer allocation state: call ${callId} freed ${size} > total allocated ${this.totalAllocated}`); + } + this.totalAllocated -= size; + const currentPerCall = (_a = this.allocatedPerCall.get(callId)) !== null && _a !== void 0 ? _a : 0; + if (currentPerCall < size) { + throw new Error(`Invalid buffer allocation state: call ${callId} freed ${size} > allocated for call ${currentPerCall}`); + } + this.allocatedPerCall.set(callId, currentPerCall - size); + } + freeAll(callId) { + var _a; + const currentPerCall = (_a = this.allocatedPerCall.get(callId)) !== null && _a !== void 0 ? _a : 0; + if (this.totalAllocated < currentPerCall) { + throw new Error(`Invalid buffer allocation state: call ${callId} allocated ${currentPerCall} > total allocated ${this.totalAllocated}`); + } + this.totalAllocated -= currentPerCall; + this.allocatedPerCall.delete(callId); + } +} +exports.MessageBufferTracker = MessageBufferTracker; +const PREVIONS_RPC_ATTEMPTS_METADATA_KEY = 'grpc-previous-rpc-attempts'; +const DEFAULT_MAX_ATTEMPTS_LIMIT = 5; +class RetryingCall { + constructor(channel, callConfig, methodName, host, credentials, deadline, callNumber, bufferTracker, retryThrottler) { + var _a; + this.channel = channel; + this.callConfig = callConfig; + this.methodName = methodName; + this.host = host; + this.credentials = credentials; + this.deadline = deadline; + this.callNumber = callNumber; + this.bufferTracker = bufferTracker; + this.retryThrottler = retryThrottler; + this.listener = null; + this.initialMetadata = null; + this.underlyingCalls = []; + this.writeBuffer = []; + /** + * The offset of message indices in the writeBuffer. For example, if + * writeBufferOffset is 10, message 10 is in writeBuffer[0] and message 15 + * is in writeBuffer[5].
+ */ + this.writeBufferOffset = 0; + /** + * Tracks whether a read has been started, so that we know whether to start + * reads on new child calls. This only matters for the first read, because + * once a message comes in the child call becomes committed and there will + * be no new child calls. + */ + this.readStarted = false; + this.transparentRetryUsed = false; + /** + * Number of attempts so far + */ + this.attempts = 0; + this.hedgingTimer = null; + this.committedCallIndex = null; + this.initialRetryBackoffSec = 0; + this.nextRetryBackoffSec = 0; + const maxAttemptsLimit = (_a = channel.getOptions()['grpc-node.retry_max_attempts_limit']) !== null && _a !== void 0 ? _a : DEFAULT_MAX_ATTEMPTS_LIMIT; + if (callConfig.methodConfig.retryPolicy) { + this.state = 'RETRY'; + const retryPolicy = callConfig.methodConfig.retryPolicy; + this.nextRetryBackoffSec = this.initialRetryBackoffSec = Number(retryPolicy.initialBackoff.substring(0, retryPolicy.initialBackoff.length - 1)); + this.maxAttempts = Math.min(retryPolicy.maxAttempts, maxAttemptsLimit); + } + else if (callConfig.methodConfig.hedgingPolicy) { + this.state = 'HEDGING'; + this.maxAttempts = Math.min(callConfig.methodConfig.hedgingPolicy.maxAttempts, maxAttemptsLimit); + } + else { + this.state = 'TRANSPARENT_ONLY'; + this.maxAttempts = 1; + } + this.startTime = new Date(); + } + getDeadlineInfo() { + if (this.underlyingCalls.length === 0) { + return []; + } + const deadlineInfo = []; + const latestCall = this.underlyingCalls[this.underlyingCalls.length - 1]; + if (this.underlyingCalls.length > 1) { + deadlineInfo.push(`previous attempts: ${this.underlyingCalls.length - 1}`); + } + if (latestCall.startTime > this.startTime) { + deadlineInfo.push(`time to current attempt start: ${(0, deadline_1.formatDateDifference)(this.startTime, latestCall.startTime)}`); + } + deadlineInfo.push(...latestCall.call.getDeadlineInfo()); + return deadlineInfo; + } + getCallNumber() { + return this.callNumber; + } + trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, '[' + this.callNumber + '] ' + text); + } + reportStatus(statusObject) { + this.trace('ended with status: code=' + + statusObject.code + + ' details="' + + statusObject.details + + '" start time=' + + this.startTime.toISOString()); + this.bufferTracker.freeAll(this.callNumber); + this.writeBufferOffset = this.writeBufferOffset + this.writeBuffer.length; + this.writeBuffer = []; + process.nextTick(() => { + var _a; + // Explicitly construct status object to remove progress field + (_a = this.listener) === null || _a === void 0 ? void 0 : _a.onReceiveStatus({ + code: statusObject.code, + details: statusObject.details, + metadata: statusObject.metadata, + }); + }); + } + cancelWithStatus(status, details) { + this.trace('cancelWithStatus code: ' + status + ' details: "' + details + '"'); + this.reportStatus({ code: status, details, metadata: new metadata_1.Metadata() }); + for (const { call } of this.underlyingCalls) { + call.cancelWithStatus(status, details); + } + } + getPeer() { + if (this.committedCallIndex !== null) { + return this.underlyingCalls[this.committedCallIndex].call.getPeer(); + } + else { + return 'unknown'; + } + } + getBufferEntry(messageIndex) { + var _a; + return ((_a = this.writeBuffer[messageIndex - this.writeBufferOffset]) !== null && _a !== void 0 ? 
_a : { + entryType: 'FREED', + allocated: false, + }); + } + getNextBufferIndex() { + return this.writeBufferOffset + this.writeBuffer.length; + } + clearSentMessages() { + if (this.state !== 'COMMITTED') { + return; + } + const earliestNeededMessageIndex = this.underlyingCalls[this.committedCallIndex].nextMessageToSend; + for (let messageIndex = this.writeBufferOffset; messageIndex < earliestNeededMessageIndex; messageIndex++) { + const bufferEntry = this.getBufferEntry(messageIndex); + if (bufferEntry.allocated) { + this.bufferTracker.free(bufferEntry.message.message.length, this.callNumber); + } + } + this.writeBuffer = this.writeBuffer.slice(earliestNeededMessageIndex - this.writeBufferOffset); + this.writeBufferOffset = earliestNeededMessageIndex; + } + commitCall(index) { + if (this.state === 'COMMITTED') { + return; + } + if (this.underlyingCalls[index].state === 'COMPLETED') { + return; + } + this.trace('Committing call [' + + this.underlyingCalls[index].call.getCallNumber() + + '] at index ' + + index); + this.state = 'COMMITTED'; + this.committedCallIndex = index; + for (let i = 0; i < this.underlyingCalls.length; i++) { + if (i === index) { + continue; + } + if (this.underlyingCalls[i].state === 'COMPLETED') { + continue; + } + this.underlyingCalls[i].state = 'COMPLETED'; + this.underlyingCalls[i].call.cancelWithStatus(constants_1.Status.CANCELLED, 'Discarded in favor of other hedged attempt'); + } + this.clearSentMessages(); + } + commitCallWithMostMessages() { + if (this.state === 'COMMITTED') { + return; + } + let mostMessages = -1; + let callWithMostMessages = -1; + for (const [index, childCall] of this.underlyingCalls.entries()) { + if (childCall.state === 'ACTIVE' && + childCall.nextMessageToSend > mostMessages) { + mostMessages = childCall.nextMessageToSend; + callWithMostMessages = index; + } + } + if (callWithMostMessages === -1) { + /* There are no active calls, disable retries to force the next call that + * is started to be committed. */ + this.state = 'TRANSPARENT_ONLY'; + } + else { + this.commitCall(callWithMostMessages); + } + } + isStatusCodeInList(list, code) { + return list.some(value => { + var _a; + return value === code || + value.toString().toLowerCase() === ((_a = constants_1.Status[code]) === null || _a === void 0 ? void 0 : _a.toLowerCase()); + }); + } + getNextRetryBackoffMs() { + var _a; + const retryPolicy = (_a = this.callConfig) === null || _a === void 0 ? void 0 : _a.methodConfig.retryPolicy; + if (!retryPolicy) { + return 0; + } + const nextBackoffMs = Math.random() * this.nextRetryBackoffSec * 1000; + const maxBackoffSec = Number(retryPolicy.maxBackoff.substring(0, retryPolicy.maxBackoff.length - 1)); + this.nextRetryBackoffSec = Math.min(this.nextRetryBackoffSec * retryPolicy.backoffMultiplier, maxBackoffSec); + return nextBackoffMs; + } + maybeRetryCall(pushback, callback) { + if (this.state !== 'RETRY') { + callback(false); + return; + } + if (this.attempts >= this.maxAttempts) { + callback(false); + return; + } + let retryDelayMs; + if (pushback === null) { + retryDelayMs = this.getNextRetryBackoffMs(); + } + else if (pushback < 0) { + this.state = 'TRANSPARENT_ONLY'; + callback(false); + return; + } + else { + retryDelayMs = pushback; + this.nextRetryBackoffSec = this.initialRetryBackoffSec; + } + setTimeout(() => { + var _a, _b; + if (this.state !== 'RETRY') { + callback(false); + return; + } + if ((_b = (_a = this.retryThrottler) === null || _a === void 0 ? void 0 : _a.canRetryCall()) !== null && _b !== void 0 ? 
_b : true) { + callback(true); + this.attempts += 1; + this.startNewAttempt(); + } + }, retryDelayMs); + } + countActiveCalls() { + let count = 0; + for (const call of this.underlyingCalls) { + if ((call === null || call === void 0 ? void 0 : call.state) === 'ACTIVE') { + count += 1; + } + } + return count; + } + handleProcessedStatus(status, callIndex, pushback) { + var _a, _b, _c; + switch (this.state) { + case 'COMMITTED': + case 'TRANSPARENT_ONLY': + this.commitCall(callIndex); + this.reportStatus(status); + break; + case 'HEDGING': + if (this.isStatusCodeInList((_a = this.callConfig.methodConfig.hedgingPolicy.nonFatalStatusCodes) !== null && _a !== void 0 ? _a : [], status.code)) { + (_b = this.retryThrottler) === null || _b === void 0 ? void 0 : _b.addCallFailed(); + let delayMs; + if (pushback === null) { + delayMs = 0; + } + else if (pushback < 0) { + this.state = 'TRANSPARENT_ONLY'; + this.commitCall(callIndex); + this.reportStatus(status); + return; + } + else { + delayMs = pushback; + } + setTimeout(() => { + this.maybeStartHedgingAttempt(); + // If after trying to start a call there are no active calls, this was the last one + if (this.countActiveCalls() === 0) { + this.commitCall(callIndex); + this.reportStatus(status); + } + }, delayMs); + } + else { + this.commitCall(callIndex); + this.reportStatus(status); + } + break; + case 'RETRY': + if (this.isStatusCodeInList(this.callConfig.methodConfig.retryPolicy.retryableStatusCodes, status.code)) { + (_c = this.retryThrottler) === null || _c === void 0 ? void 0 : _c.addCallFailed(); + this.maybeRetryCall(pushback, retried => { + if (!retried) { + this.commitCall(callIndex); + this.reportStatus(status); + } + }); + } + else { + this.commitCall(callIndex); + this.reportStatus(status); + } + break; + } + } + getPushback(metadata) { + const mdValue = metadata.get('grpc-retry-pushback-ms'); + if (mdValue.length === 0) { + return null; + } + try { + return parseInt(mdValue[0]); + } + catch (e) { + return -1; + } + } + handleChildStatus(status, callIndex) { + var _a; + if (this.underlyingCalls[callIndex].state === 'COMPLETED') { + return; + } + this.trace('state=' + + this.state + + ' handling status with progress ' + + status.progress + + ' from child [' + + this.underlyingCalls[callIndex].call.getCallNumber() + + '] in state ' + + this.underlyingCalls[callIndex].state); + this.underlyingCalls[callIndex].state = 'COMPLETED'; + if (status.code === constants_1.Status.OK) { + (_a = this.retryThrottler) === null || _a === void 0 ? 
void 0 : _a.addCallSucceeded(); + this.commitCall(callIndex); + this.reportStatus(status); + return; + } + if (this.state === 'COMMITTED') { + this.reportStatus(status); + return; + } + const pushback = this.getPushback(status.metadata); + switch (status.progress) { + case 'NOT_STARTED': + // RPC never leaves the client, always safe to retry + this.startNewAttempt(); + break; + case 'REFUSED': + // RPC reaches the server library, but not the server application logic + if (this.transparentRetryUsed) { + this.handleProcessedStatus(status, callIndex, pushback); + } + else { + this.transparentRetryUsed = true; + this.startNewAttempt(); + } + break; + case 'DROP': + this.commitCall(callIndex); + this.reportStatus(status); + break; + case 'PROCESSED': + this.handleProcessedStatus(status, callIndex, pushback); + break; + } + } + maybeStartHedgingAttempt() { + if (this.state !== 'HEDGING') { + return; + } + if (!this.callConfig.methodConfig.hedgingPolicy) { + return; + } + if (this.attempts >= this.maxAttempts) { + return; + } + this.attempts += 1; + this.startNewAttempt(); + this.maybeStartHedgingTimer(); + } + maybeStartHedgingTimer() { + var _a, _b, _c; + if (this.hedgingTimer) { + clearTimeout(this.hedgingTimer); + } + if (this.state !== 'HEDGING') { + return; + } + if (!this.callConfig.methodConfig.hedgingPolicy) { + return; + } + const hedgingPolicy = this.callConfig.methodConfig.hedgingPolicy; + if (this.attempts >= this.maxAttempts) { + return; + } + const hedgingDelayString = (_a = hedgingPolicy.hedgingDelay) !== null && _a !== void 0 ? _a : '0s'; + const hedgingDelaySec = Number(hedgingDelayString.substring(0, hedgingDelayString.length - 1)); + this.hedgingTimer = setTimeout(() => { + this.maybeStartHedgingAttempt(); + }, hedgingDelaySec * 1000); + (_c = (_b = this.hedgingTimer).unref) === null || _c === void 0 ? 
void 0 : _c.call(_b); + } + startNewAttempt() { + const child = this.channel.createLoadBalancingCall(this.callConfig, this.methodName, this.host, this.credentials, this.deadline); + this.trace('Created child call [' + + child.getCallNumber() + + '] for attempt ' + + this.attempts); + const index = this.underlyingCalls.length; + this.underlyingCalls.push({ + state: 'ACTIVE', + call: child, + nextMessageToSend: 0, + startTime: new Date() + }); + const previousAttempts = this.attempts - 1; + const initialMetadata = this.initialMetadata.clone(); + if (previousAttempts > 0) { + initialMetadata.set(PREVIONS_RPC_ATTEMPTS_METADATA_KEY, `${previousAttempts}`); + } + let receivedMetadata = false; + child.start(initialMetadata, { + onReceiveMetadata: metadata => { + this.trace('Received metadata from child [' + child.getCallNumber() + ']'); + this.commitCall(index); + receivedMetadata = true; + if (previousAttempts > 0) { + metadata.set(PREVIONS_RPC_ATTEMPTS_METADATA_KEY, `${previousAttempts}`); + } + if (this.underlyingCalls[index].state === 'ACTIVE') { + this.listener.onReceiveMetadata(metadata); + } + }, + onReceiveMessage: message => { + this.trace('Received message from child [' + child.getCallNumber() + ']'); + this.commitCall(index); + if (this.underlyingCalls[index].state === 'ACTIVE') { + this.listener.onReceiveMessage(message); + } + }, + onReceiveStatus: status => { + this.trace('Received status from child [' + child.getCallNumber() + ']'); + if (!receivedMetadata && previousAttempts > 0) { + status.metadata.set(PREVIONS_RPC_ATTEMPTS_METADATA_KEY, `${previousAttempts}`); + } + this.handleChildStatus(status, index); + }, + }); + this.sendNextChildMessage(index); + if (this.readStarted) { + child.startRead(); + } + } + start(metadata, listener) { + this.trace('start called'); + this.listener = listener; + this.initialMetadata = metadata; + this.attempts += 1; + this.startNewAttempt(); + this.maybeStartHedgingTimer(); + } + handleChildWriteCompleted(childIndex) { + var _a, _b; + const childCall = this.underlyingCalls[childIndex]; + const messageIndex = childCall.nextMessageToSend; + (_b = (_a = this.getBufferEntry(messageIndex)).callback) === null || _b === void 0 ? void 0 : _b.call(_a); + this.clearSentMessages(); + childCall.nextMessageToSend += 1; + this.sendNextChildMessage(childIndex); + } + sendNextChildMessage(childIndex) { + const childCall = this.underlyingCalls[childIndex]; + if (childCall.state === 'COMPLETED') { + return; + } + if (this.getBufferEntry(childCall.nextMessageToSend)) { + const bufferEntry = this.getBufferEntry(childCall.nextMessageToSend); + switch (bufferEntry.entryType) { + case 'MESSAGE': + childCall.call.sendMessageWithContext({ + callback: error => { + // Ignore error + this.handleChildWriteCompleted(childIndex); + }, + }, bufferEntry.message.message); + break; + case 'HALF_CLOSE': + childCall.nextMessageToSend += 1; + childCall.call.halfClose(); + break; + case 'FREED': + // Should not be possible + break; + } + } + } + sendMessageWithContext(context, message) { + var _a; + this.trace('write() called with message of length ' + message.length); + const writeObj = { + message, + flags: context.flags, + }; + const messageIndex = this.getNextBufferIndex(); + const bufferEntry = { + entryType: 'MESSAGE', + message: writeObj, + allocated: this.bufferTracker.allocate(message.length, this.callNumber), + }; + this.writeBuffer.push(bufferEntry); + if (bufferEntry.allocated) { + (_a = context.callback) === null || _a === void 0 ? 
void 0 : _a.call(context); + for (const [callIndex, call] of this.underlyingCalls.entries()) { + if (call.state === 'ACTIVE' && + call.nextMessageToSend === messageIndex) { + call.call.sendMessageWithContext({ + callback: error => { + // Ignore error + this.handleChildWriteCompleted(callIndex); + }, + }, message); + } + } + } + else { + this.commitCallWithMostMessages(); + // commitCallWithMostMessages can fail if we are between ping attempts + if (this.committedCallIndex === null) { + return; + } + const call = this.underlyingCalls[this.committedCallIndex]; + bufferEntry.callback = context.callback; + if (call.state === 'ACTIVE' && call.nextMessageToSend === messageIndex) { + call.call.sendMessageWithContext({ + callback: error => { + // Ignore error + this.handleChildWriteCompleted(this.committedCallIndex); + }, + }, message); + } + } + } + startRead() { + this.trace('startRead called'); + this.readStarted = true; + for (const underlyingCall of this.underlyingCalls) { + if ((underlyingCall === null || underlyingCall === void 0 ? void 0 : underlyingCall.state) === 'ACTIVE') { + underlyingCall.call.startRead(); + } + } + } + halfClose() { + this.trace('halfClose called'); + const halfCloseIndex = this.getNextBufferIndex(); + this.writeBuffer.push({ + entryType: 'HALF_CLOSE', + allocated: false, + }); + for (const call of this.underlyingCalls) { + if ((call === null || call === void 0 ? void 0 : call.state) === 'ACTIVE' && + call.nextMessageToSend === halfCloseIndex) { + call.nextMessageToSend += 1; + call.call.halfClose(); + } + } + } + setCredentials(newCredentials) { + throw new Error('Method not implemented.'); + } + getMethod() { + return this.methodName; + } + getHost() { + return this.host; + } +} +exports.RetryingCall = RetryingCall; +//# sourceMappingURL=retrying-call.js.map + +/***/ }), + +/***/ 62533: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ServerDuplexStreamImpl = exports.ServerWritableStreamImpl = exports.ServerReadableStreamImpl = exports.ServerUnaryCallImpl = exports.serverErrorToStatus = void 0; +const events_1 = __nccwpck_require__(82361); +const stream_1 = __nccwpck_require__(12781); +const constants_1 = __nccwpck_require__(90634); +const metadata_1 = __nccwpck_require__(83665); +function serverErrorToStatus(error, overrideTrailers) { + var _a; + const status = { + code: constants_1.Status.UNKNOWN, + details: 'message' in error ? error.message : 'Unknown Error', + metadata: (_a = overrideTrailers !== null && overrideTrailers !== void 0 ? overrideTrailers : error.metadata) !== null && _a !== void 0 ? 
_a : null, + }; + if ('code' in error && + typeof error.code === 'number' && + Number.isInteger(error.code)) { + status.code = error.code; + if ('details' in error && typeof error.details === 'string') { + status.details = error.details; + } + } + return status; +} +exports.serverErrorToStatus = serverErrorToStatus; +class ServerUnaryCallImpl extends events_1.EventEmitter { + constructor(path, call, metadata, request) { + super(); + this.path = path; + this.call = call; + this.metadata = metadata; + this.request = request; + this.cancelled = false; + } + getPeer() { + return this.call.getPeer(); + } + sendMetadata(responseMetadata) { + this.call.sendMetadata(responseMetadata); + } + getDeadline() { + return this.call.getDeadline(); + } + getPath() { + return this.path; + } + getHost() { + return this.call.getHost(); + } +} +exports.ServerUnaryCallImpl = ServerUnaryCallImpl; +class ServerReadableStreamImpl extends stream_1.Readable { + constructor(path, call, metadata) { + super({ objectMode: true }); + this.path = path; + this.call = call; + this.metadata = metadata; + this.cancelled = false; + } + _read(size) { + this.call.startRead(); + } + getPeer() { + return this.call.getPeer(); + } + sendMetadata(responseMetadata) { + this.call.sendMetadata(responseMetadata); + } + getDeadline() { + return this.call.getDeadline(); + } + getPath() { + return this.path; + } + getHost() { + return this.call.getHost(); + } +} +exports.ServerReadableStreamImpl = ServerReadableStreamImpl; +class ServerWritableStreamImpl extends stream_1.Writable { + constructor(path, call, metadata, request) { + super({ objectMode: true }); + this.path = path; + this.call = call; + this.metadata = metadata; + this.request = request; + this.pendingStatus = { + code: constants_1.Status.OK, + details: 'OK', + }; + this.cancelled = false; + this.trailingMetadata = new metadata_1.Metadata(); + this.on('error', err => { + this.pendingStatus = serverErrorToStatus(err); + this.end(); + }); + } + getPeer() { + return this.call.getPeer(); + } + sendMetadata(responseMetadata) { + this.call.sendMetadata(responseMetadata); + } + getDeadline() { + return this.call.getDeadline(); + } + getPath() { + return this.path; + } + getHost() { + return this.call.getHost(); + } + _write(chunk, encoding, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback) { + this.call.sendMessage(chunk, callback); + } + _final(callback) { + var _a; + callback(null); + this.call.sendStatus(Object.assign(Object.assign({}, this.pendingStatus), { metadata: (_a = this.pendingStatus.metadata) !== null && _a !== void 0 ? 
_a : this.trailingMetadata })); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + end(metadata) { + if (metadata) { + this.trailingMetadata = metadata; + } + return super.end(); + } +} +exports.ServerWritableStreamImpl = ServerWritableStreamImpl; +class ServerDuplexStreamImpl extends stream_1.Duplex { + constructor(path, call, metadata) { + super({ objectMode: true }); + this.path = path; + this.call = call; + this.metadata = metadata; + this.pendingStatus = { + code: constants_1.Status.OK, + details: 'OK', + }; + this.cancelled = false; + this.trailingMetadata = new metadata_1.Metadata(); + this.on('error', err => { + this.pendingStatus = serverErrorToStatus(err); + this.end(); + }); + } + getPeer() { + return this.call.getPeer(); + } + sendMetadata(responseMetadata) { + this.call.sendMetadata(responseMetadata); + } + getDeadline() { + return this.call.getDeadline(); + } + getPath() { + return this.path; + } + getHost() { + return this.call.getHost(); + } + _read(size) { + this.call.startRead(); + } + _write(chunk, encoding, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback) { + this.call.sendMessage(chunk, callback); + } + _final(callback) { + var _a; + callback(null); + this.call.sendStatus(Object.assign(Object.assign({}, this.pendingStatus), { metadata: (_a = this.pendingStatus.metadata) !== null && _a !== void 0 ? _a : this.trailingMetadata })); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + end(metadata) { + if (metadata) { + this.trailingMetadata = metadata; + } + return super.end(); + } +} +exports.ServerDuplexStreamImpl = ServerDuplexStreamImpl; +//# sourceMappingURL=server-call.js.map + +/***/ }), + +/***/ 63828: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createServerCredentialsWithInterceptors = exports.createCertificateProviderServerCredentials = exports.ServerCredentials = void 0; +const tls_helpers_1 = __nccwpck_require__(86581); +class ServerCredentials { + constructor() { + this.watchers = new Set(); + this.latestContextOptions = null; + } + _addWatcher(watcher) { + this.watchers.add(watcher); + } + _removeWatcher(watcher) { + this.watchers.delete(watcher); + } + getWatcherCount() { + return this.watchers.size; + } + updateSecureContextOptions(options) { + this.latestContextOptions = options; + for (const watcher of this.watchers) { + watcher(this.latestContextOptions); + } + } + _getSettings() { + return this.latestContextOptions; + } + _getInterceptors() { + return []; + } + static createInsecure() { + return new InsecureServerCredentials(); + } + static createSsl(rootCerts, keyCertPairs, checkClientCertificate = false) { + var _a; + if (rootCerts !== null && !Buffer.isBuffer(rootCerts)) { + throw new TypeError('rootCerts must be null or a Buffer'); + } + if (!Array.isArray(keyCertPairs)) { + throw new TypeError('keyCertPairs must be an array'); + } + if (typeof checkClientCertificate !== 'boolean') { + throw new TypeError('checkClientCertificate must be a boolean'); + } + const cert = []; + const key = []; + for (let i = 0; i < keyCertPairs.length; i++) { + const pair = keyCertPairs[i]; + if (pair === null || typeof pair !== 'object') { + throw new TypeError(`keyCertPair[${i}] must be an object`); + } + if (!Buffer.isBuffer(pair.private_key)) { + throw new TypeError(`keyCertPair[${i}].private_key must be a Buffer`); + } + if (!Buffer.isBuffer(pair.cert_chain)) { + throw new TypeError(`keyCertPair[${i}].cert_chain must be a Buffer`); + } + cert.push(pair.cert_chain); + key.push(pair.private_key); + } + return new SecureServerCredentials({ + ca: (_a = rootCerts !== null && rootCerts !== void 0 ? rootCerts : (0, tls_helpers_1.getDefaultRootsData)()) !== null && _a !== void 0 ? _a : undefined, + cert, + key, + requestCert: checkClientCertificate, + ciphers: tls_helpers_1.CIPHER_SUITES, + }); + } +} +exports.ServerCredentials = ServerCredentials; +class InsecureServerCredentials extends ServerCredentials { + _isSecure() { + return false; + } + _getSettings() { + return null; + } + _equals(other) { + return other instanceof InsecureServerCredentials; + } +} +class SecureServerCredentials extends ServerCredentials { + constructor(options) { + super(); + this.options = options; + } + _isSecure() { + return true; + } + _getSettings() { + return this.options; + } + /** + * Checks equality by checking the options that are actually set by + * createSsl. 
+ * @param other + * @returns + */ + _equals(other) { + if (this === other) { + return true; + } + if (!(other instanceof SecureServerCredentials)) { + return false; + } + // options.ca equality check + if (Buffer.isBuffer(this.options.ca) && Buffer.isBuffer(other.options.ca)) { + if (!this.options.ca.equals(other.options.ca)) { + return false; + } + } + else { + if (this.options.ca !== other.options.ca) { + return false; + } + } + // options.cert equality check + if (Array.isArray(this.options.cert) && Array.isArray(other.options.cert)) { + if (this.options.cert.length !== other.options.cert.length) { + return false; + } + for (let i = 0; i < this.options.cert.length; i++) { + const thisCert = this.options.cert[i]; + const otherCert = other.options.cert[i]; + if (Buffer.isBuffer(thisCert) && Buffer.isBuffer(otherCert)) { + if (!thisCert.equals(otherCert)) { + return false; + } + } + else { + if (thisCert !== otherCert) { + return false; + } + } + } + } + else { + if (this.options.cert !== other.options.cert) { + return false; + } + } + // options.key equality check + if (Array.isArray(this.options.key) && Array.isArray(other.options.key)) { + if (this.options.key.length !== other.options.key.length) { + return false; + } + for (let i = 0; i < this.options.key.length; i++) { + const thisKey = this.options.key[i]; + const otherKey = other.options.key[i]; + if (Buffer.isBuffer(thisKey) && Buffer.isBuffer(otherKey)) { + if (!thisKey.equals(otherKey)) { + return false; + } + } + else { + if (thisKey !== otherKey) { + return false; + } + } + } + } + else { + if (this.options.key !== other.options.key) { + return false; + } + } + // options.requestCert equality check + if (this.options.requestCert !== other.options.requestCert) { + return false; + } + /* ciphers is derived from a value that is constant for the process, so no + * equality check is needed. */ + return true; + } +} +class CertificateProviderServerCredentials extends ServerCredentials { + constructor(identityCertificateProvider, caCertificateProvider, requireClientCertificate) { + super(); + this.identityCertificateProvider = identityCertificateProvider; + this.caCertificateProvider = caCertificateProvider; + this.requireClientCertificate = requireClientCertificate; + this.latestCaUpdate = null; + this.latestIdentityUpdate = null; + this.caCertificateUpdateListener = this.handleCaCertificateUpdate.bind(this); + this.identityCertificateUpdateListener = this.handleIdentityCertitificateUpdate.bind(this); + } + _addWatcher(watcher) { + var _a; + if (this.getWatcherCount() === 0) { + (_a = this.caCertificateProvider) === null || _a === void 0 ? void 0 : _a.addCaCertificateListener(this.caCertificateUpdateListener); + this.identityCertificateProvider.addIdentityCertificateListener(this.identityCertificateUpdateListener); + } + super._addWatcher(watcher); + } + _removeWatcher(watcher) { + var _a; + super._removeWatcher(watcher); + if (this.getWatcherCount() === 0) { + (_a = this.caCertificateProvider) === null || _a === void 0 ? 
void 0 : _a.removeCaCertificateListener(this.caCertificateUpdateListener); + this.identityCertificateProvider.removeIdentityCertificateListener(this.identityCertificateUpdateListener); + } + } + _isSecure() { + return true; + } + _equals(other) { + if (this === other) { + return true; + } + if (!(other instanceof CertificateProviderServerCredentials)) { + return false; + } + return (this.caCertificateProvider === other.caCertificateProvider && + this.identityCertificateProvider === other.identityCertificateProvider && + this.requireClientCertificate === other.requireClientCertificate); + } + calculateSecureContextOptions() { + var _a; + if (this.latestIdentityUpdate === null) { + return null; + } + if (this.caCertificateProvider !== null && this.latestCaUpdate === null) { + return null; + } + return { + ca: (_a = this.latestCaUpdate) === null || _a === void 0 ? void 0 : _a.caCertificate, + cert: this.latestIdentityUpdate.certificate, + key: this.latestIdentityUpdate.privateKey, + requestCert: this.latestIdentityUpdate !== null, + rejectUnauthorized: this.requireClientCertificate + }; + } + finalizeUpdate() { + this.updateSecureContextOptions(this.calculateSecureContextOptions()); + } + handleCaCertificateUpdate(update) { + this.latestCaUpdate = update; + this.finalizeUpdate(); + } + handleIdentityCertitificateUpdate(update) { + this.latestIdentityUpdate = update; + this.finalizeUpdate(); + } +} +function createCertificateProviderServerCredentials(caCertificateProvider, identityCertificateProvider, requireClientCertificate) { + return new CertificateProviderServerCredentials(caCertificateProvider, identityCertificateProvider, requireClientCertificate); +} +exports.createCertificateProviderServerCredentials = createCertificateProviderServerCredentials; +class InterceptorServerCredentials extends ServerCredentials { + constructor(childCredentials, interceptors) { + super(); + this.childCredentials = childCredentials; + this.interceptors = interceptors; + } + _isSecure() { + return this.childCredentials._isSecure(); + } + _equals(other) { + if (!(other instanceof InterceptorServerCredentials)) { + return false; + } + if (!(this.childCredentials._equals(other.childCredentials))) { + return false; + } + if (this.interceptors.length !== other.interceptors.length) { + return false; + } + for (let i = 0; i < this.interceptors.length; i++) { + if (this.interceptors[i] !== other.interceptors[i]) { + return false; + } + } + return true; + } + _getInterceptors() { + return this.interceptors; + } + _addWatcher(watcher) { + this.childCredentials._addWatcher(watcher); + } + _removeWatcher(watcher) { + this.childCredentials._removeWatcher(watcher); + } +} +function createServerCredentialsWithInterceptors(credentials, interceptors) { + return new InterceptorServerCredentials(credentials, interceptors); +} +exports.createServerCredentialsWithInterceptors = createServerCredentialsWithInterceptors; +//# sourceMappingURL=server-credentials.js.map + +/***/ }), + +/***/ 20998: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2024 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getServerInterceptingCall = exports.BaseServerInterceptingCall = exports.ServerInterceptingCall = exports.ResponderBuilder = exports.isInterceptingServerListener = exports.ServerListenerBuilder = void 0; +const metadata_1 = __nccwpck_require__(83665); +const constants_1 = __nccwpck_require__(90634); +const http2 = __nccwpck_require__(85158); +const error_1 = __nccwpck_require__(22336); +const zlib = __nccwpck_require__(59796); +const stream_decoder_1 = __nccwpck_require__(16575); +const logging = __nccwpck_require__(35993); +const TRACER_NAME = 'server_call'; +function trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, text); +} +class ServerListenerBuilder { + constructor() { + this.metadata = undefined; + this.message = undefined; + this.halfClose = undefined; + this.cancel = undefined; + } + withOnReceiveMetadata(onReceiveMetadata) { + this.metadata = onReceiveMetadata; + return this; + } + withOnReceiveMessage(onReceiveMessage) { + this.message = onReceiveMessage; + return this; + } + withOnReceiveHalfClose(onReceiveHalfClose) { + this.halfClose = onReceiveHalfClose; + return this; + } + withOnCancel(onCancel) { + this.cancel = onCancel; + return this; + } + build() { + return { + onReceiveMetadata: this.metadata, + onReceiveMessage: this.message, + onReceiveHalfClose: this.halfClose, + onCancel: this.cancel, + }; + } +} +exports.ServerListenerBuilder = ServerListenerBuilder; +function isInterceptingServerListener(listener) { + return (listener.onReceiveMetadata !== undefined && + listener.onReceiveMetadata.length === 1); +} +exports.isInterceptingServerListener = isInterceptingServerListener; +class InterceptingServerListenerImpl { + constructor(listener, nextListener) { + this.listener = listener; + this.nextListener = nextListener; + /** + * Once the call is cancelled, ignore all other events. 
+ */ + this.cancelled = false; + this.processingMetadata = false; + this.hasPendingMessage = false; + this.pendingMessage = null; + this.processingMessage = false; + this.hasPendingHalfClose = false; + } + processPendingMessage() { + if (this.hasPendingMessage) { + this.nextListener.onReceiveMessage(this.pendingMessage); + this.pendingMessage = null; + this.hasPendingMessage = false; + } + } + processPendingHalfClose() { + if (this.hasPendingHalfClose) { + this.nextListener.onReceiveHalfClose(); + this.hasPendingHalfClose = false; + } + } + onReceiveMetadata(metadata) { + if (this.cancelled) { + return; + } + this.processingMetadata = true; + this.listener.onReceiveMetadata(metadata, interceptedMetadata => { + this.processingMetadata = false; + if (this.cancelled) { + return; + } + this.nextListener.onReceiveMetadata(interceptedMetadata); + this.processPendingMessage(); + this.processPendingHalfClose(); + }); + } + onReceiveMessage(message) { + if (this.cancelled) { + return; + } + this.processingMessage = true; + this.listener.onReceiveMessage(message, msg => { + this.processingMessage = false; + if (this.cancelled) { + return; + } + if (this.processingMetadata) { + this.pendingMessage = msg; + this.hasPendingMessage = true; + } + else { + this.nextListener.onReceiveMessage(msg); + this.processPendingHalfClose(); + } + }); + } + onReceiveHalfClose() { + if (this.cancelled) { + return; + } + this.listener.onReceiveHalfClose(() => { + if (this.cancelled) { + return; + } + if (this.processingMetadata || this.processingMessage) { + this.hasPendingHalfClose = true; + } + else { + this.nextListener.onReceiveHalfClose(); + } + }); + } + onCancel() { + this.cancelled = true; + this.listener.onCancel(); + this.nextListener.onCancel(); + } +} +class ResponderBuilder { + constructor() { + this.start = undefined; + this.metadata = undefined; + this.message = undefined; + this.status = undefined; + } + withStart(start) { + this.start = start; + return this; + } + withSendMetadata(sendMetadata) { + this.metadata = sendMetadata; + return this; + } + withSendMessage(sendMessage) { + this.message = sendMessage; + return this; + } + withSendStatus(sendStatus) { + this.status = sendStatus; + return this; + } + build() { + return { + start: this.start, + sendMetadata: this.metadata, + sendMessage: this.message, + sendStatus: this.status, + }; + } +} +exports.ResponderBuilder = ResponderBuilder; +const defaultServerListener = { + onReceiveMetadata: (metadata, next) => { + next(metadata); + }, + onReceiveMessage: (message, next) => { + next(message); + }, + onReceiveHalfClose: next => { + next(); + }, + onCancel: () => { }, +}; +const defaultResponder = { + start: next => { + next(); + }, + sendMetadata: (metadata, next) => { + next(metadata); + }, + sendMessage: (message, next) => { + next(message); + }, + sendStatus: (status, next) => { + next(status); + }, +}; +class ServerInterceptingCall { + constructor(nextCall, responder) { + var _a, _b, _c, _d; + this.nextCall = nextCall; + this.processingMetadata = false; + this.processingMessage = false; + this.pendingMessage = null; + this.pendingMessageCallback = null; + this.pendingStatus = null; + this.responder = { + start: (_a = responder === null || responder === void 0 ? void 0 : responder.start) !== null && _a !== void 0 ? _a : defaultResponder.start, + sendMetadata: (_b = responder === null || responder === void 0 ? void 0 : responder.sendMetadata) !== null && _b !== void 0 ? 
_b : defaultResponder.sendMetadata, + sendMessage: (_c = responder === null || responder === void 0 ? void 0 : responder.sendMessage) !== null && _c !== void 0 ? _c : defaultResponder.sendMessage, + sendStatus: (_d = responder === null || responder === void 0 ? void 0 : responder.sendStatus) !== null && _d !== void 0 ? _d : defaultResponder.sendStatus, + }; + } + processPendingMessage() { + if (this.pendingMessageCallback) { + this.nextCall.sendMessage(this.pendingMessage, this.pendingMessageCallback); + this.pendingMessage = null; + this.pendingMessageCallback = null; + } + } + processPendingStatus() { + if (this.pendingStatus) { + this.nextCall.sendStatus(this.pendingStatus); + this.pendingStatus = null; + } + } + start(listener) { + this.responder.start(interceptedListener => { + var _a, _b, _c, _d; + const fullInterceptedListener = { + onReceiveMetadata: (_a = interceptedListener === null || interceptedListener === void 0 ? void 0 : interceptedListener.onReceiveMetadata) !== null && _a !== void 0 ? _a : defaultServerListener.onReceiveMetadata, + onReceiveMessage: (_b = interceptedListener === null || interceptedListener === void 0 ? void 0 : interceptedListener.onReceiveMessage) !== null && _b !== void 0 ? _b : defaultServerListener.onReceiveMessage, + onReceiveHalfClose: (_c = interceptedListener === null || interceptedListener === void 0 ? void 0 : interceptedListener.onReceiveHalfClose) !== null && _c !== void 0 ? _c : defaultServerListener.onReceiveHalfClose, + onCancel: (_d = interceptedListener === null || interceptedListener === void 0 ? void 0 : interceptedListener.onCancel) !== null && _d !== void 0 ? _d : defaultServerListener.onCancel, + }; + const finalInterceptingListener = new InterceptingServerListenerImpl(fullInterceptedListener, listener); + this.nextCall.start(finalInterceptingListener); + }); + } + sendMetadata(metadata) { + this.processingMetadata = true; + this.responder.sendMetadata(metadata, interceptedMetadata => { + this.processingMetadata = false; + this.nextCall.sendMetadata(interceptedMetadata); + this.processPendingMessage(); + this.processPendingStatus(); + }); + } + sendMessage(message, callback) { + this.processingMessage = true; + this.responder.sendMessage(message, interceptedMessage => { + this.processingMessage = false; + if (this.processingMetadata) { + this.pendingMessage = interceptedMessage; + this.pendingMessageCallback = callback; + } + else { + this.nextCall.sendMessage(interceptedMessage, callback); + } + }); + } + sendStatus(status) { + this.responder.sendStatus(status, interceptedStatus => { + if (this.processingMetadata || this.processingMessage) { + this.pendingStatus = interceptedStatus; + } + else { + this.nextCall.sendStatus(interceptedStatus); + } + }); + } + startRead() { + this.nextCall.startRead(); + } + getPeer() { + return this.nextCall.getPeer(); + } + getDeadline() { + return this.nextCall.getDeadline(); + } + getHost() { + return this.nextCall.getHost(); + } +} +exports.ServerInterceptingCall = ServerInterceptingCall; +const GRPC_ACCEPT_ENCODING_HEADER = 'grpc-accept-encoding'; +const GRPC_ENCODING_HEADER = 'grpc-encoding'; +const GRPC_MESSAGE_HEADER = 'grpc-message'; +const GRPC_STATUS_HEADER = 'grpc-status'; +const GRPC_TIMEOUT_HEADER = 'grpc-timeout'; +const DEADLINE_REGEX = /(\d{1,8})\s*([HMSmun])/; +const deadlineUnitsToMs = { + H: 3600000, + M: 60000, + S: 1000, + m: 1, + u: 0.001, + n: 0.000001, +}; +const defaultCompressionHeaders = { + // TODO(cjihrig): Remove these encoding headers from the default response + // once 
compression is integrated. + [GRPC_ACCEPT_ENCODING_HEADER]: 'identity,deflate,gzip', + [GRPC_ENCODING_HEADER]: 'identity', +}; +const defaultResponseHeaders = { + [http2.constants.HTTP2_HEADER_STATUS]: http2.constants.HTTP_STATUS_OK, + [http2.constants.HTTP2_HEADER_CONTENT_TYPE]: 'application/grpc+proto', +}; +const defaultResponseOptions = { + waitForTrailers: true, +}; +class BaseServerInterceptingCall { + constructor(stream, headers, callEventTracker, handler, options) { + var _a; + this.stream = stream; + this.callEventTracker = callEventTracker; + this.handler = handler; + this.listener = null; + this.deadlineTimer = null; + this.deadline = Infinity; + this.maxSendMessageSize = constants_1.DEFAULT_MAX_SEND_MESSAGE_LENGTH; + this.maxReceiveMessageSize = constants_1.DEFAULT_MAX_RECEIVE_MESSAGE_LENGTH; + this.cancelled = false; + this.metadataSent = false; + this.wantTrailers = false; + this.cancelNotified = false; + this.incomingEncoding = 'identity'; + this.readQueue = []; + this.isReadPending = false; + this.receivedHalfClose = false; + this.streamEnded = false; + this.stream.once('error', (err) => { + /* We need an error handler to avoid uncaught error event exceptions, but + * there is nothing we can reasonably do here. Any error event should + * have a corresponding close event, which handles emitting the cancelled + * event. And the stream is now in a bad state, so we can't reasonably + * expect to be able to send an error over it. */ + }); + this.stream.once('close', () => { + var _a; + trace('Request to method ' + + ((_a = this.handler) === null || _a === void 0 ? void 0 : _a.path) + + ' stream closed with rstCode ' + + this.stream.rstCode); + if (this.callEventTracker && !this.streamEnded) { + this.streamEnded = true; + this.callEventTracker.onStreamEnd(false); + this.callEventTracker.onCallEnd({ + code: constants_1.Status.CANCELLED, + details: 'Stream closed before sending status', + metadata: null, + }); + } + this.notifyOnCancel(); + }); + this.stream.on('data', (data) => { + this.handleDataFrame(data); + }); + this.stream.pause(); + this.stream.on('end', () => { + this.handleEndEvent(); + }); + if ('grpc.max_send_message_length' in options) { + this.maxSendMessageSize = options['grpc.max_send_message_length']; + } + if ('grpc.max_receive_message_length' in options) { + this.maxReceiveMessageSize = options['grpc.max_receive_message_length']; + } + this.host = (_a = headers[':authority']) !== null && _a !== void 0 ? 
_a : headers.host; + this.decoder = new stream_decoder_1.StreamDecoder(this.maxReceiveMessageSize); + const metadata = metadata_1.Metadata.fromHttp2Headers(headers); + if (logging.isTracerEnabled(TRACER_NAME)) { + trace('Request to ' + + this.handler.path + + ' received headers ' + + JSON.stringify(metadata.toJSON())); + } + const timeoutHeader = metadata.get(GRPC_TIMEOUT_HEADER); + if (timeoutHeader.length > 0) { + this.handleTimeoutHeader(timeoutHeader[0]); + } + const encodingHeader = metadata.get(GRPC_ENCODING_HEADER); + if (encodingHeader.length > 0) { + this.incomingEncoding = encodingHeader[0]; + } + // Remove several headers that should not be propagated to the application + metadata.remove(GRPC_TIMEOUT_HEADER); + metadata.remove(GRPC_ENCODING_HEADER); + metadata.remove(GRPC_ACCEPT_ENCODING_HEADER); + metadata.remove(http2.constants.HTTP2_HEADER_ACCEPT_ENCODING); + metadata.remove(http2.constants.HTTP2_HEADER_TE); + metadata.remove(http2.constants.HTTP2_HEADER_CONTENT_TYPE); + this.metadata = metadata; + } + handleTimeoutHeader(timeoutHeader) { + const match = timeoutHeader.toString().match(DEADLINE_REGEX); + if (match === null) { + const status = { + code: constants_1.Status.INTERNAL, + details: `Invalid ${GRPC_TIMEOUT_HEADER} value "${timeoutHeader}"`, + metadata: null, + }; + // Wait for the constructor to complete before sending the error. + process.nextTick(() => { + this.sendStatus(status); + }); + return; + } + const timeout = (+match[1] * deadlineUnitsToMs[match[2]]) | 0; + const now = new Date(); + this.deadline = now.setMilliseconds(now.getMilliseconds() + timeout); + this.deadlineTimer = setTimeout(() => { + const status = { + code: constants_1.Status.DEADLINE_EXCEEDED, + details: 'Deadline exceeded', + metadata: null, + }; + this.sendStatus(status); + }, timeout); + } + checkCancelled() { + /* In some cases the stream can become destroyed before the close event + * fires. That creates a race condition that this check works around */ + if (!this.cancelled && (this.stream.destroyed || this.stream.closed)) { + this.notifyOnCancel(); + this.cancelled = true; + } + return this.cancelled; + } + notifyOnCancel() { + if (this.cancelNotified) { + return; + } + this.cancelNotified = true; + this.cancelled = true; + process.nextTick(() => { + var _a; + (_a = this.listener) === null || _a === void 0 ? void 0 : _a.onCancel(); + }); + if (this.deadlineTimer) { + clearTimeout(this.deadlineTimer); + } + // Flush incoming data frames + this.stream.resume(); + } + /** + * A server handler can start sending messages without explicitly sending + * metadata. In that case, we need to send headers before sending any + * messages. This function does that if necessary. + */ + maybeSendMetadata() { + if (!this.metadataSent) { + this.sendMetadata(new metadata_1.Metadata()); + } + } + /** + * Serialize a message to a length-delimited byte string. + * @param value + * @returns + */ + serializeMessage(value) { + const messageBuffer = this.handler.serialize(value); + const byteLength = messageBuffer.byteLength; + const output = Buffer.allocUnsafe(byteLength + 5); + /* Note: response compression is currently not supported, so this + * compressed bit is always 0. 
*/ + output.writeUInt8(0, 0); + output.writeUInt32BE(byteLength, 1); + messageBuffer.copy(output, 5); + return output; + } + decompressMessage(message, encoding) { + const messageContents = message.subarray(5); + if (encoding === 'identity') { + return messageContents; + } + else if (encoding === 'deflate' || encoding === 'gzip') { + let decompresser; + if (encoding === 'deflate') { + decompresser = zlib.createInflate(); + } + else { + decompresser = zlib.createGunzip(); + } + return new Promise((resolve, reject) => { + let totalLength = 0; + const messageParts = []; + decompresser.on('data', (chunk) => { + messageParts.push(chunk); + totalLength += chunk.byteLength; + if (this.maxReceiveMessageSize !== -1 && totalLength > this.maxReceiveMessageSize) { + decompresser.destroy(); + reject({ + code: constants_1.Status.RESOURCE_EXHAUSTED, + details: `Received message that decompresses to a size larger than ${this.maxReceiveMessageSize}` + }); + } + }); + decompresser.on('end', () => { + resolve(Buffer.concat(messageParts)); + }); + decompresser.write(messageContents); + decompresser.end(); + }); + } + else { + return Promise.reject({ + code: constants_1.Status.UNIMPLEMENTED, + details: `Received message compressed with unsupported encoding "${encoding}"`, + }); + } + } + async decompressAndMaybePush(queueEntry) { + if (queueEntry.type !== 'COMPRESSED') { + throw new Error(`Invalid queue entry type: ${queueEntry.type}`); + } + const compressed = queueEntry.compressedMessage.readUInt8(0) === 1; + const compressedMessageEncoding = compressed + ? this.incomingEncoding + : 'identity'; + let decompressedMessage; + try { + decompressedMessage = await this.decompressMessage(queueEntry.compressedMessage, compressedMessageEncoding); + } + catch (err) { + this.sendStatus(err); + return; + } + try { + queueEntry.parsedMessage = this.handler.deserialize(decompressedMessage); + } + catch (err) { + this.sendStatus({ + code: constants_1.Status.INTERNAL, + details: `Error deserializing request: ${err.message}`, + }); + return; + } + queueEntry.type = 'READABLE'; + this.maybePushNextMessage(); + } + maybePushNextMessage() { + if (this.listener && + this.isReadPending && + this.readQueue.length > 0 && + this.readQueue[0].type !== 'COMPRESSED') { + this.isReadPending = false; + const nextQueueEntry = this.readQueue.shift(); + if (nextQueueEntry.type === 'READABLE') { + this.listener.onReceiveMessage(nextQueueEntry.parsedMessage); + } + else { + // nextQueueEntry.type === 'HALF_CLOSE' + this.listener.onReceiveHalfClose(); + } + } + } + handleDataFrame(data) { + var _a; + if (this.checkCancelled()) { + return; + } + trace('Request to ' + + this.handler.path + + ' received data frame of size ' + + data.length); + let rawMessages; + try { + rawMessages = this.decoder.write(data); + } + catch (e) { + this.sendStatus({ code: constants_1.Status.RESOURCE_EXHAUSTED, details: e.message }); + return; + } + for (const messageBytes of rawMessages) { + this.stream.pause(); + const queueEntry = { + type: 'COMPRESSED', + compressedMessage: messageBytes, + parsedMessage: null, + }; + this.readQueue.push(queueEntry); + this.decompressAndMaybePush(queueEntry); + (_a = this.callEventTracker) === null || _a === void 0 ? 
void 0 : _a.addMessageReceived(); + } + } + handleEndEvent() { + this.readQueue.push({ + type: 'HALF_CLOSE', + compressedMessage: null, + parsedMessage: null, + }); + this.receivedHalfClose = true; + this.maybePushNextMessage(); + } + start(listener) { + trace('Request to ' + this.handler.path + ' start called'); + if (this.checkCancelled()) { + return; + } + this.listener = listener; + listener.onReceiveMetadata(this.metadata); + } + sendMetadata(metadata) { + if (this.checkCancelled()) { + return; + } + if (this.metadataSent) { + return; + } + this.metadataSent = true; + const custom = metadata ? metadata.toHttp2Headers() : null; + const headers = Object.assign(Object.assign(Object.assign({}, defaultResponseHeaders), defaultCompressionHeaders), custom); + this.stream.respond(headers, defaultResponseOptions); + } + sendMessage(message, callback) { + if (this.checkCancelled()) { + return; + } + let response; + try { + response = this.serializeMessage(message); + } + catch (e) { + this.sendStatus({ + code: constants_1.Status.INTERNAL, + details: `Error serializing response: ${(0, error_1.getErrorMessage)(e)}`, + metadata: null, + }); + return; + } + if (this.maxSendMessageSize !== -1 && + response.length - 5 > this.maxSendMessageSize) { + this.sendStatus({ + code: constants_1.Status.RESOURCE_EXHAUSTED, + details: `Sent message larger than max (${response.length} vs. ${this.maxSendMessageSize})`, + metadata: null, + }); + return; + } + this.maybeSendMetadata(); + trace('Request to ' + + this.handler.path + + ' sent data frame of size ' + + response.length); + this.stream.write(response, error => { + var _a; + if (error) { + this.sendStatus({ + code: constants_1.Status.INTERNAL, + details: `Error writing message: ${(0, error_1.getErrorMessage)(error)}`, + metadata: null, + }); + return; + } + (_a = this.callEventTracker) === null || _a === void 0 ? void 0 : _a.addMessageSent(); + callback(); + }); + } + sendStatus(status) { + var _a, _b; + if (this.checkCancelled()) { + return; + } + trace('Request to method ' + + ((_a = this.handler) === null || _a === void 0 ? void 0 : _a.path) + + ' ended with status code: ' + + constants_1.Status[status.code] + + ' details: ' + + status.details); + if (this.metadataSent) { + if (!this.wantTrailers) { + this.wantTrailers = true; + this.stream.once('wantTrailers', () => { + var _a; + if (this.callEventTracker && !this.streamEnded) { + this.streamEnded = true; + this.callEventTracker.onStreamEnd(true); + this.callEventTracker.onCallEnd(status); + } + const trailersToSend = Object.assign({ [GRPC_STATUS_HEADER]: status.code, [GRPC_MESSAGE_HEADER]: encodeURI(status.details) }, (_a = status.metadata) === null || _a === void 0 ? void 0 : _a.toHttp2Headers()); + this.stream.sendTrailers(trailersToSend); + this.notifyOnCancel(); + }); + this.stream.end(); + } + else { + this.notifyOnCancel(); + } + } + else { + if (this.callEventTracker && !this.streamEnded) { + this.streamEnded = true; + this.callEventTracker.onStreamEnd(true); + this.callEventTracker.onCallEnd(status); + } + // Trailers-only response + const trailersToSend = Object.assign(Object.assign({ [GRPC_STATUS_HEADER]: status.code, [GRPC_MESSAGE_HEADER]: encodeURI(status.details) }, defaultResponseHeaders), (_b = status.metadata) === null || _b === void 0 ? 
void 0 : _b.toHttp2Headers()); + this.stream.respond(trailersToSend, { endStream: true }); + this.notifyOnCancel(); + } + } + startRead() { + trace('Request to ' + this.handler.path + ' startRead called'); + if (this.checkCancelled()) { + return; + } + this.isReadPending = true; + if (this.readQueue.length === 0) { + if (!this.receivedHalfClose) { + this.stream.resume(); + } + } + else { + this.maybePushNextMessage(); + } + } + getPeer() { + var _a; + const socket = (_a = this.stream.session) === null || _a === void 0 ? void 0 : _a.socket; + if (socket === null || socket === void 0 ? void 0 : socket.remoteAddress) { + if (socket.remotePort) { + return `${socket.remoteAddress}:${socket.remotePort}`; + } + else { + return socket.remoteAddress; + } + } + else { + return 'unknown'; + } + } + getDeadline() { + return this.deadline; + } + getHost() { + return this.host; + } +} +exports.BaseServerInterceptingCall = BaseServerInterceptingCall; +function getServerInterceptingCall(interceptors, stream, headers, callEventTracker, handler, options) { + const methodDefinition = { + path: handler.path, + requestStream: handler.type === 'clientStream' || handler.type === 'bidi', + responseStream: handler.type === 'serverStream' || handler.type === 'bidi', + requestDeserialize: handler.deserialize, + responseSerialize: handler.serialize, + }; + const baseCall = new BaseServerInterceptingCall(stream, headers, callEventTracker, handler, options); + return interceptors.reduce((call, interceptor) => { + return interceptor(methodDefinition, call); + }, baseCall); +} +exports.getServerInterceptingCall = getServerInterceptingCall; +//# sourceMappingURL=server-interceptors.js.map + +/***/ }), + +/***/ 33389: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +var __runInitializers = (this && this.__runInitializers) || function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +}; +var __esDecorate = (this && this.__esDecorate) || function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? 
{} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Server = void 0; +const http2 = __nccwpck_require__(85158); +const util = __nccwpck_require__(73837); +const constants_1 = __nccwpck_require__(90634); +const server_call_1 = __nccwpck_require__(62533); +const server_credentials_1 = __nccwpck_require__(63828); +const resolver_1 = __nccwpck_require__(31594); +const logging = __nccwpck_require__(35993); +const subchannel_address_1 = __nccwpck_require__(78021); +const uri_parser_1 = __nccwpck_require__(65974); +const channelz_1 = __nccwpck_require__(79975); +const server_interceptors_1 = __nccwpck_require__(20998); +const UNLIMITED_CONNECTION_AGE_MS = ~(1 << 31); +const KEEPALIVE_MAX_TIME_MS = ~(1 << 31); +const KEEPALIVE_TIMEOUT_MS = 20000; +const MAX_CONNECTION_IDLE_MS = ~(1 << 31); +const { HTTP2_HEADER_PATH } = http2.constants; +const TRACER_NAME = 'server'; +const kMaxAge = Buffer.from('max_age'); +function noop() { } +/** + * Decorator to wrap a class method with util.deprecate + * @param message The message to output if the deprecated method is called + * @returns + */ +function deprecate(message) { + return function (target, context) { + return util.deprecate(target, message); + }; +} +function getUnimplementedStatusResponse(methodName) { + return { + code: constants_1.Status.UNIMPLEMENTED, + details: `The server does not implement the method ${methodName}`, + }; +} +function getDefaultHandler(handlerType, methodName) { + const unimplementedStatusResponse = getUnimplementedStatusResponse(methodName); + switch (handlerType) { + case 'unary': + return (call, callback) => { + callback(unimplementedStatusResponse, null); + }; + case 'clientStream': + return (call, callback) => { + callback(unimplementedStatusResponse, null); + }; + case 'serverStream': + return (call) => { + call.emit('error', unimplementedStatusResponse); + }; + case 'bidi': + return (call) => { + call.emit('error', unimplementedStatusResponse); + }; + default: + throw new Error(`Invalid handlerType ${handlerType}`); + } +} +let Server = (() => { + var _a; + let _instanceExtraInitializers = []; + let _start_decorators; + return _a = class Server { + constructor(options) { + var _b, _c, _d, _e, _f, _g; + this.boundPorts = (__runInitializers(this, _instanceExtraInitializers), new Map()); + this.http2Servers = new Map(); + this.sessionIdleTimeouts = new Map(); + this.handlers = new Map(); + this.sessions = new Map(); + /** + * This field only exists to ensure that the start method throws an error if + * it is called twice, as it did previously. 
+ */ + this.started = false; + this.shutdown = false; + this.serverAddressString = 'null'; + // Channelz Info + this.channelzEnabled = true; + this.options = options !== null && options !== void 0 ? options : {}; + if (this.options['grpc.enable_channelz'] === 0) { + this.channelzEnabled = false; + this.channelzTrace = new channelz_1.ChannelzTraceStub(); + this.callTracker = new channelz_1.ChannelzCallTrackerStub(); + this.listenerChildrenTracker = new channelz_1.ChannelzChildrenTrackerStub(); + this.sessionChildrenTracker = new channelz_1.ChannelzChildrenTrackerStub(); + } + else { + this.channelzTrace = new channelz_1.ChannelzTrace(); + this.callTracker = new channelz_1.ChannelzCallTracker(); + this.listenerChildrenTracker = new channelz_1.ChannelzChildrenTracker(); + this.sessionChildrenTracker = new channelz_1.ChannelzChildrenTracker(); + } + this.channelzRef = (0, channelz_1.registerChannelzServer)('server', () => this.getChannelzInfo(), this.channelzEnabled); + this.channelzTrace.addTrace('CT_INFO', 'Server created'); + this.maxConnectionAgeMs = + (_b = this.options['grpc.max_connection_age_ms']) !== null && _b !== void 0 ? _b : UNLIMITED_CONNECTION_AGE_MS; + this.maxConnectionAgeGraceMs = + (_c = this.options['grpc.max_connection_age_grace_ms']) !== null && _c !== void 0 ? _c : UNLIMITED_CONNECTION_AGE_MS; + this.keepaliveTimeMs = + (_d = this.options['grpc.keepalive_time_ms']) !== null && _d !== void 0 ? _d : KEEPALIVE_MAX_TIME_MS; + this.keepaliveTimeoutMs = + (_e = this.options['grpc.keepalive_timeout_ms']) !== null && _e !== void 0 ? _e : KEEPALIVE_TIMEOUT_MS; + this.sessionIdleTimeout = + (_f = this.options['grpc.max_connection_idle_ms']) !== null && _f !== void 0 ? _f : MAX_CONNECTION_IDLE_MS; + this.commonServerOptions = { + maxSendHeaderBlockLength: Number.MAX_SAFE_INTEGER, + }; + if ('grpc-node.max_session_memory' in this.options) { + this.commonServerOptions.maxSessionMemory = + this.options['grpc-node.max_session_memory']; + } + else { + /* By default, set a very large max session memory limit, to effectively + * disable enforcement of the limit. Some testing indicates that Node's + * behavior degrades badly when this limit is reached, so we solve that + * by disabling the check entirely. */ + this.commonServerOptions.maxSessionMemory = Number.MAX_SAFE_INTEGER; + } + if ('grpc.max_concurrent_streams' in this.options) { + this.commonServerOptions.settings = { + maxConcurrentStreams: this.options['grpc.max_concurrent_streams'], + }; + } + this.interceptors = (_g = this.options.interceptors) !== null && _g !== void 0 ? _g : []; + this.trace('Server constructed'); + } + getChannelzInfo() { + return { + trace: this.channelzTrace, + callTracker: this.callTracker, + listenerChildren: this.listenerChildrenTracker.getChildLists(), + sessionChildren: this.sessionChildrenTracker.getChildLists(), + }; + } + getChannelzSessionInfo(session) { + var _b, _c, _d; + const sessionInfo = this.sessions.get(session); + const sessionSocket = session.socket; + const remoteAddress = sessionSocket.remoteAddress + ? (0, subchannel_address_1.stringToSubchannelAddress)(sessionSocket.remoteAddress, sessionSocket.remotePort) + : null; + const localAddress = sessionSocket.localAddress + ? 
(0, subchannel_address_1.stringToSubchannelAddress)(sessionSocket.localAddress, sessionSocket.localPort) + : null; + let tlsInfo; + if (session.encrypted) { + const tlsSocket = sessionSocket; + const cipherInfo = tlsSocket.getCipher(); + const certificate = tlsSocket.getCertificate(); + const peerCertificate = tlsSocket.getPeerCertificate(); + tlsInfo = { + cipherSuiteStandardName: (_b = cipherInfo.standardName) !== null && _b !== void 0 ? _b : null, + cipherSuiteOtherName: cipherInfo.standardName ? null : cipherInfo.name, + localCertificate: certificate && 'raw' in certificate ? certificate.raw : null, + remoteCertificate: peerCertificate && 'raw' in peerCertificate + ? peerCertificate.raw + : null, + }; + } + else { + tlsInfo = null; + } + const socketInfo = { + remoteAddress: remoteAddress, + localAddress: localAddress, + security: tlsInfo, + remoteName: null, + streamsStarted: sessionInfo.streamTracker.callsStarted, + streamsSucceeded: sessionInfo.streamTracker.callsSucceeded, + streamsFailed: sessionInfo.streamTracker.callsFailed, + messagesSent: sessionInfo.messagesSent, + messagesReceived: sessionInfo.messagesReceived, + keepAlivesSent: sessionInfo.keepAlivesSent, + lastLocalStreamCreatedTimestamp: null, + lastRemoteStreamCreatedTimestamp: sessionInfo.streamTracker.lastCallStartedTimestamp, + lastMessageSentTimestamp: sessionInfo.lastMessageSentTimestamp, + lastMessageReceivedTimestamp: sessionInfo.lastMessageReceivedTimestamp, + localFlowControlWindow: (_c = session.state.localWindowSize) !== null && _c !== void 0 ? _c : null, + remoteFlowControlWindow: (_d = session.state.remoteWindowSize) !== null && _d !== void 0 ? _d : null, + }; + return socketInfo; + } + trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, '(' + this.channelzRef.id + ') ' + text); + } + keepaliveTrace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, 'keepalive', '(' + this.channelzRef.id + ') ' + text); + } + addProtoService() { + throw new Error('Not implemented. 
Use addService() instead'); + } + addService(service, implementation) { + if (service === null || + typeof service !== 'object' || + implementation === null || + typeof implementation !== 'object') { + throw new Error('addService() requires two objects as arguments'); + } + const serviceKeys = Object.keys(service); + if (serviceKeys.length === 0) { + throw new Error('Cannot add an empty service to a server'); + } + serviceKeys.forEach(name => { + const attrs = service[name]; + let methodType; + if (attrs.requestStream) { + if (attrs.responseStream) { + methodType = 'bidi'; + } + else { + methodType = 'clientStream'; + } + } + else { + if (attrs.responseStream) { + methodType = 'serverStream'; + } + else { + methodType = 'unary'; + } + } + let implFn = implementation[name]; + let impl; + if (implFn === undefined && typeof attrs.originalName === 'string') { + implFn = implementation[attrs.originalName]; + } + if (implFn !== undefined) { + impl = implFn.bind(implementation); + } + else { + impl = getDefaultHandler(methodType, name); + } + const success = this.register(attrs.path, impl, attrs.responseSerialize, attrs.requestDeserialize, methodType); + if (success === false) { + throw new Error(`Method handler for ${attrs.path} already provided.`); + } + }); + } + removeService(service) { + if (service === null || typeof service !== 'object') { + throw new Error('removeService() requires object as argument'); + } + const serviceKeys = Object.keys(service); + serviceKeys.forEach(name => { + const attrs = service[name]; + this.unregister(attrs.path); + }); + } + bind(port, creds) { + throw new Error('Not implemented. Use bindAsync() instead'); + } + registerListenerToChannelz(boundAddress) { + return (0, channelz_1.registerChannelzSocket)((0, subchannel_address_1.subchannelAddressToString)(boundAddress), () => { + return { + localAddress: boundAddress, + remoteAddress: null, + security: null, + remoteName: null, + streamsStarted: 0, + streamsSucceeded: 0, + streamsFailed: 0, + messagesSent: 0, + messagesReceived: 0, + keepAlivesSent: 0, + lastLocalStreamCreatedTimestamp: null, + lastRemoteStreamCreatedTimestamp: null, + lastMessageSentTimestamp: null, + lastMessageReceivedTimestamp: null, + localFlowControlWindow: null, + remoteFlowControlWindow: null, + }; + }, this.channelzEnabled); + } + createHttp2Server(credentials) { + let http2Server; + if (credentials._isSecure()) { + const credentialsSettings = credentials._getSettings(); + const secureServerOptions = Object.assign(Object.assign(Object.assign({}, this.commonServerOptions), credentialsSettings), { enableTrace: this.options['grpc-node.tls_enable_trace'] === 1 }); + let areCredentialsValid = credentialsSettings !== null; + http2Server = http2.createSecureServer(secureServerOptions); + http2Server.on('connection', (socket) => { + if (!areCredentialsValid) { + socket.destroy(); + } + }); + http2Server.on('secureConnection', (socket) => { + /* These errors need to be handled by the user of Http2SecureServer, + * according to https://github.com/nodejs/node/issues/35824 */ + socket.on('error', (e) => { + this.trace('An incoming TLS connection closed with error: ' + e.message); + }); + }); + const credsWatcher = options => { + if (options) { + http2Server.setSecureContext(options); + } + areCredentialsValid = options !== null; + }; + credentials._addWatcher(credsWatcher); + http2Server.on('close', () => { + credentials._removeWatcher(credsWatcher); + }); + } + else { + http2Server = http2.createServer(this.commonServerOptions); + } + 
http2Server.setTimeout(0, noop); + this._setupHandlers(http2Server, credentials._getInterceptors()); + return http2Server; + } + bindOneAddress(address, boundPortObject) { + this.trace('Attempting to bind ' + (0, subchannel_address_1.subchannelAddressToString)(address)); + const http2Server = this.createHttp2Server(boundPortObject.credentials); + return new Promise((resolve, reject) => { + const onError = (err) => { + this.trace('Failed to bind ' + + (0, subchannel_address_1.subchannelAddressToString)(address) + + ' with error ' + + err.message); + resolve({ + port: 'port' in address ? address.port : 1, + error: err.message, + }); + }; + http2Server.once('error', onError); + http2Server.listen(address, () => { + const boundAddress = http2Server.address(); + let boundSubchannelAddress; + if (typeof boundAddress === 'string') { + boundSubchannelAddress = { + path: boundAddress, + }; + } + else { + boundSubchannelAddress = { + host: boundAddress.address, + port: boundAddress.port, + }; + } + const channelzRef = this.registerListenerToChannelz(boundSubchannelAddress); + this.listenerChildrenTracker.refChild(channelzRef); + this.http2Servers.set(http2Server, { + channelzRef: channelzRef, + sessions: new Set(), + }); + boundPortObject.listeningServers.add(http2Server); + this.trace('Successfully bound ' + + (0, subchannel_address_1.subchannelAddressToString)(boundSubchannelAddress)); + resolve({ + port: 'port' in boundSubchannelAddress ? boundSubchannelAddress.port : 1, + }); + http2Server.removeListener('error', onError); + }); + }); + } + async bindManyPorts(addressList, boundPortObject) { + if (addressList.length === 0) { + return { + count: 0, + port: 0, + errors: [], + }; + } + if ((0, subchannel_address_1.isTcpSubchannelAddress)(addressList[0]) && addressList[0].port === 0) { + /* If binding to port 0, first try to bind the first address, then bind + * the rest of the address list to the specific port that it binds. */ + const firstAddressResult = await this.bindOneAddress(addressList[0], boundPortObject); + if (firstAddressResult.error) { + /* If the first address fails to bind, try the same operation starting + * from the second item in the list. */ + const restAddressResult = await this.bindManyPorts(addressList.slice(1), boundPortObject); + return Object.assign(Object.assign({}, restAddressResult), { errors: [firstAddressResult.error, ...restAddressResult.errors] }); + } + else { + const restAddresses = addressList + .slice(1) + .map(address => (0, subchannel_address_1.isTcpSubchannelAddress)(address) + ? 
{ host: address.host, port: firstAddressResult.port } + : address); + const restAddressResult = await Promise.all(restAddresses.map(address => this.bindOneAddress(address, boundPortObject))); + const allResults = [firstAddressResult, ...restAddressResult]; + return { + count: allResults.filter(result => result.error === undefined).length, + port: firstAddressResult.port, + errors: allResults + .filter(result => result.error) + .map(result => result.error), + }; + } + } + else { + const allResults = await Promise.all(addressList.map(address => this.bindOneAddress(address, boundPortObject))); + return { + count: allResults.filter(result => result.error === undefined).length, + port: allResults[0].port, + errors: allResults + .filter(result => result.error) + .map(result => result.error), + }; + } + } + async bindAddressList(addressList, boundPortObject) { + const bindResult = await this.bindManyPorts(addressList, boundPortObject); + if (bindResult.count > 0) { + if (bindResult.count < addressList.length) { + logging.log(constants_1.LogVerbosity.INFO, `WARNING Only ${bindResult.count} addresses added out of total ${addressList.length} resolved`); + } + return bindResult.port; + } + else { + const errorString = `No address added out of total ${addressList.length} resolved`; + logging.log(constants_1.LogVerbosity.ERROR, errorString); + throw new Error(`${errorString} errors: [${bindResult.errors.join(',')}]`); + } + } + resolvePort(port) { + return new Promise((resolve, reject) => { + const resolverListener = { + onSuccessfulResolution: (endpointList, serviceConfig, serviceConfigError) => { + // We only want one resolution result. Discard all future results + resolverListener.onSuccessfulResolution = () => { }; + const addressList = [].concat(...endpointList.map(endpoint => endpoint.addresses)); + if (addressList.length === 0) { + reject(new Error(`No addresses resolved for port ${port}`)); + return; + } + resolve(addressList); + }, + onError: error => { + reject(new Error(error.details)); + }, + }; + const resolver = (0, resolver_1.createResolver)(port, resolverListener, this.options); + resolver.updateResolution(); + }); + } + async bindPort(port, boundPortObject) { + const addressList = await this.resolvePort(port); + if (boundPortObject.cancelled) { + this.completeUnbind(boundPortObject); + throw new Error('bindAsync operation cancelled by unbind call'); + } + const portNumber = await this.bindAddressList(addressList, boundPortObject); + if (boundPortObject.cancelled) { + this.completeUnbind(boundPortObject); + throw new Error('bindAsync operation cancelled by unbind call'); + } + return portNumber; + } + normalizePort(port) { + const initialPortUri = (0, uri_parser_1.parseUri)(port); + if (initialPortUri === null) { + throw new Error(`Could not parse port "${port}"`); + } + const portUri = (0, resolver_1.mapUriDefaultScheme)(initialPortUri); + if (portUri === null) { + throw new Error(`Could not get a default scheme for port "${port}"`); + } + return portUri; + } + bindAsync(port, creds, callback) { + if (this.shutdown) { + throw new Error('bindAsync called after shutdown'); + } + if (typeof port !== 'string') { + throw new TypeError('port must be a string'); + } + if (creds === null || !(creds instanceof server_credentials_1.ServerCredentials)) { + throw new TypeError('creds must be a ServerCredentials object'); + } + if (typeof callback !== 'function') { + throw new TypeError('callback must be a function'); + } + this.trace('bindAsync port=' + port); + const portUri = 
this.normalizePort(port); + const deferredCallback = (error, port) => { + process.nextTick(() => callback(error, port)); + }; + /* First, if this port is already bound or that bind operation is in + * progress, use that result. */ + let boundPortObject = this.boundPorts.get((0, uri_parser_1.uriToString)(portUri)); + if (boundPortObject) { + if (!creds._equals(boundPortObject.credentials)) { + deferredCallback(new Error(`${port} already bound with incompatible credentials`), 0); + return; + } + /* If that operation has previously been cancelled by an unbind call, + * uncancel it. */ + boundPortObject.cancelled = false; + if (boundPortObject.completionPromise) { + boundPortObject.completionPromise.then(portNum => callback(null, portNum), error => callback(error, 0)); + } + else { + deferredCallback(null, boundPortObject.portNumber); + } + return; + } + boundPortObject = { + mapKey: (0, uri_parser_1.uriToString)(portUri), + originalUri: portUri, + completionPromise: null, + cancelled: false, + portNumber: 0, + credentials: creds, + listeningServers: new Set(), + }; + const splitPort = (0, uri_parser_1.splitHostPort)(portUri.path); + const completionPromise = this.bindPort(portUri, boundPortObject); + boundPortObject.completionPromise = completionPromise; + /* If the port number is 0, defer populating the map entry until after the + * bind operation completes and we have a specific port number. Otherwise, + * populate it immediately. */ + if ((splitPort === null || splitPort === void 0 ? void 0 : splitPort.port) === 0) { + completionPromise.then(portNum => { + const finalUri = { + scheme: portUri.scheme, + authority: portUri.authority, + path: (0, uri_parser_1.combineHostPort)({ host: splitPort.host, port: portNum }), + }; + boundPortObject.mapKey = (0, uri_parser_1.uriToString)(finalUri); + boundPortObject.completionPromise = null; + boundPortObject.portNumber = portNum; + this.boundPorts.set(boundPortObject.mapKey, boundPortObject); + callback(null, portNum); + }, error => { + callback(error, 0); + }); + } + else { + this.boundPorts.set(boundPortObject.mapKey, boundPortObject); + completionPromise.then(portNum => { + boundPortObject.completionPromise = null; + boundPortObject.portNumber = portNum; + callback(null, portNum); + }, error => { + callback(error, 0); + }); + } + } + registerInjectorToChannelz() { + return (0, channelz_1.registerChannelzSocket)('injector', () => { + return { + localAddress: null, + remoteAddress: null, + security: null, + remoteName: null, + streamsStarted: 0, + streamsSucceeded: 0, + streamsFailed: 0, + messagesSent: 0, + messagesReceived: 0, + keepAlivesSent: 0, + lastLocalStreamCreatedTimestamp: null, + lastRemoteStreamCreatedTimestamp: null, + lastMessageSentTimestamp: null, + lastMessageReceivedTimestamp: null, + localFlowControlWindow: null, + remoteFlowControlWindow: null, + }; + }, this.channelzEnabled); + } + createConnectionInjector(credentials) { + if (credentials === null || !(credentials instanceof server_credentials_1.ServerCredentials)) { + throw new TypeError('creds must be a ServerCredentials object'); + } + const server = this.createHttp2Server(credentials); + const channelzRef = this.registerInjectorToChannelz(); + if (this.channelzEnabled) { + this.listenerChildrenTracker.refChild(channelzRef); + } + const sessionsSet = new Set(); + this.http2Servers.set(server, { + channelzRef: channelzRef, + sessions: sessionsSet + }); + return { + injectConnection: (connection) => { + server.emit('connection', connection); + }, + drain: (graceTimeMs) => { + 
var _b, _c; + for (const session of sessionsSet) { + this.closeSession(session); + } + (_c = (_b = setTimeout(() => { + for (const session of sessionsSet) { + session.destroy(http2.constants.NGHTTP2_CANCEL); + } + }, graceTimeMs)).unref) === null || _c === void 0 ? void 0 : _c.call(_b); + }, + destroy: () => { + this.closeServer(server); + for (const session of sessionsSet) { + this.closeSession(session); + } + } + }; + } + closeServer(server, callback) { + this.trace('Closing server with address ' + JSON.stringify(server.address())); + const serverInfo = this.http2Servers.get(server); + server.close(() => { + if (serverInfo) { + this.listenerChildrenTracker.unrefChild(serverInfo.channelzRef); + (0, channelz_1.unregisterChannelzRef)(serverInfo.channelzRef); + } + this.http2Servers.delete(server); + callback === null || callback === void 0 ? void 0 : callback(); + }); + } + closeSession(session, callback) { + var _b; + this.trace('Closing session initiated by ' + ((_b = session.socket) === null || _b === void 0 ? void 0 : _b.remoteAddress)); + const sessionInfo = this.sessions.get(session); + const closeCallback = () => { + if (sessionInfo) { + this.sessionChildrenTracker.unrefChild(sessionInfo.ref); + (0, channelz_1.unregisterChannelzRef)(sessionInfo.ref); + } + callback === null || callback === void 0 ? void 0 : callback(); + }; + if (session.closed) { + queueMicrotask(closeCallback); + } + else { + session.close(closeCallback); + } + } + completeUnbind(boundPortObject) { + for (const server of boundPortObject.listeningServers) { + const serverInfo = this.http2Servers.get(server); + this.closeServer(server, () => { + boundPortObject.listeningServers.delete(server); + }); + if (serverInfo) { + for (const session of serverInfo.sessions) { + this.closeSession(session); + } + } + } + this.boundPorts.delete(boundPortObject.mapKey); + } + /** + * Unbind a previously bound port, or cancel an in-progress bindAsync + * operation. If port 0 was bound, only the actual bound port can be + * unbound. For example, if bindAsync was called with "localhost:0" and the + * bound port result was 54321, it can be unbound as "localhost:54321". + * @param port + */ + unbind(port) { + this.trace('unbind port=' + port); + const portUri = this.normalizePort(port); + const splitPort = (0, uri_parser_1.splitHostPort)(portUri.path); + if ((splitPort === null || splitPort === void 0 ? void 0 : splitPort.port) === 0) { + throw new Error('Cannot unbind port 0'); + } + const boundPortObject = this.boundPorts.get((0, uri_parser_1.uriToString)(portUri)); + if (boundPortObject) { + this.trace('unbinding ' + + boundPortObject.mapKey + + ' originally bound as ' + + (0, uri_parser_1.uriToString)(boundPortObject.originalUri)); + /* If the bind operation is pending, the cancelled flag will trigger + * the unbind operation later. */ + if (boundPortObject.completionPromise) { + boundPortObject.cancelled = true; + } + else { + this.completeUnbind(boundPortObject); + } + } + } + /** + * Gracefully close all connections associated with a previously bound port. + * After the grace time, forcefully close all remaining open connections. + * + * If port 0 was bound, only the actual bound port can be + * drained. For example, if bindAsync was called with "localhost:0" and the + * bound port result was 54321, it can be drained as "localhost:54321". 
+ * @param port + * @param graceTimeMs + * @returns + */ + drain(port, graceTimeMs) { + var _b, _c; + this.trace('drain port=' + port + ' graceTimeMs=' + graceTimeMs); + const portUri = this.normalizePort(port); + const splitPort = (0, uri_parser_1.splitHostPort)(portUri.path); + if ((splitPort === null || splitPort === void 0 ? void 0 : splitPort.port) === 0) { + throw new Error('Cannot drain port 0'); + } + const boundPortObject = this.boundPorts.get((0, uri_parser_1.uriToString)(portUri)); + if (!boundPortObject) { + return; + } + const allSessions = new Set(); + for (const http2Server of boundPortObject.listeningServers) { + const serverEntry = this.http2Servers.get(http2Server); + if (serverEntry) { + for (const session of serverEntry.sessions) { + allSessions.add(session); + this.closeSession(session, () => { + allSessions.delete(session); + }); + } + } + } + /* After the grace time ends, send another goaway to all remaining sessions + * with the CANCEL code. */ + (_c = (_b = setTimeout(() => { + for (const session of allSessions) { + session.destroy(http2.constants.NGHTTP2_CANCEL); + } + }, graceTimeMs)).unref) === null || _c === void 0 ? void 0 : _c.call(_b); + } + forceShutdown() { + for (const boundPortObject of this.boundPorts.values()) { + boundPortObject.cancelled = true; + } + this.boundPorts.clear(); + // Close the server if it is still running. + for (const server of this.http2Servers.keys()) { + this.closeServer(server); + } + // Always destroy any available sessions. It's possible that one or more + // tryShutdown() calls are in progress. Don't wait on them to finish. + this.sessions.forEach((channelzInfo, session) => { + this.closeSession(session); + // Cast NGHTTP2_CANCEL to any because TypeScript doesn't seem to + // recognize destroy(code) as a valid signature. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + session.destroy(http2.constants.NGHTTP2_CANCEL); + }); + this.sessions.clear(); + (0, channelz_1.unregisterChannelzRef)(this.channelzRef); + this.shutdown = true; + } + register(name, handler, serialize, deserialize, type) { + if (this.handlers.has(name)) { + return false; + } + this.handlers.set(name, { + func: handler, + serialize, + deserialize, + type, + path: name, + }); + return true; + } + unregister(name) { + return this.handlers.delete(name); + } + /** + * @deprecated No longer needed as of version 1.10.x + */ + start() { + if (this.http2Servers.size === 0 || + [...this.http2Servers.keys()].every(server => !server.listening)) { + throw new Error('server must be bound in order to start'); + } + if (this.started === true) { + throw new Error('server is already started'); + } + this.started = true; + } + tryShutdown(callback) { + var _b; + const wrappedCallback = (error) => { + (0, channelz_1.unregisterChannelzRef)(this.channelzRef); + callback(error); + }; + let pendingChecks = 0; + function maybeCallback() { + pendingChecks--; + if (pendingChecks === 0) { + wrappedCallback(); + } + } + this.shutdown = true; + for (const [serverKey, server] of this.http2Servers.entries()) { + pendingChecks++; + const serverString = server.channelzRef.name; + this.trace('Waiting for server ' + serverString + ' to close'); + this.closeServer(serverKey, () => { + this.trace('Server ' + serverString + ' finished closing'); + maybeCallback(); + }); + for (const session of server.sessions.keys()) { + pendingChecks++; + const sessionString = (_b = session.socket) === null || _b === void 0 ? 
void 0 : _b.remoteAddress; + this.trace('Waiting for session ' + sessionString + ' to close'); + this.closeSession(session, () => { + this.trace('Session ' + sessionString + ' finished closing'); + maybeCallback(); + }); + } + } + if (pendingChecks === 0) { + wrappedCallback(); + } + } + addHttp2Port() { + throw new Error('Not yet implemented'); + } + /** + * Get the channelz reference object for this server. The returned value is + * garbage if channelz is disabled for this server. + * @returns + */ + getChannelzRef() { + return this.channelzRef; + } + _verifyContentType(stream, headers) { + const contentType = headers[http2.constants.HTTP2_HEADER_CONTENT_TYPE]; + if (typeof contentType !== 'string' || + !contentType.startsWith('application/grpc')) { + stream.respond({ + [http2.constants.HTTP2_HEADER_STATUS]: http2.constants.HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE, + }, { endStream: true }); + return false; + } + return true; + } + _retrieveHandler(path) { + this.trace('Received call to method ' + + path + + ' at address ' + + this.serverAddressString); + const handler = this.handlers.get(path); + if (handler === undefined) { + this.trace('No handler registered for method ' + + path + + '. Sending UNIMPLEMENTED status.'); + return null; + } + return handler; + } + _respondWithError(err, stream, channelzSessionInfo = null) { + var _b, _c; + const trailersToSend = Object.assign({ 'grpc-status': (_b = err.code) !== null && _b !== void 0 ? _b : constants_1.Status.INTERNAL, 'grpc-message': err.details, [http2.constants.HTTP2_HEADER_STATUS]: http2.constants.HTTP_STATUS_OK, [http2.constants.HTTP2_HEADER_CONTENT_TYPE]: 'application/grpc+proto' }, (_c = err.metadata) === null || _c === void 0 ? void 0 : _c.toHttp2Headers()); + stream.respond(trailersToSend, { endStream: true }); + this.callTracker.addCallFailed(); + channelzSessionInfo === null || channelzSessionInfo === void 0 ? void 0 : channelzSessionInfo.streamTracker.addCallFailed(); + } + _channelzHandler(extraInterceptors, stream, headers) { + // for handling idle timeout + this.onStreamOpened(stream); + const channelzSessionInfo = this.sessions.get(stream.session); + this.callTracker.addCallStarted(); + channelzSessionInfo === null || channelzSessionInfo === void 0 ? void 0 : channelzSessionInfo.streamTracker.addCallStarted(); + if (!this._verifyContentType(stream, headers)) { + this.callTracker.addCallFailed(); + channelzSessionInfo === null || channelzSessionInfo === void 0 ? 
void 0 : channelzSessionInfo.streamTracker.addCallFailed(); + return; + } + const path = headers[HTTP2_HEADER_PATH]; + const handler = this._retrieveHandler(path); + if (!handler) { + this._respondWithError(getUnimplementedStatusResponse(path), stream, channelzSessionInfo); + return; + } + const callEventTracker = { + addMessageSent: () => { + if (channelzSessionInfo) { + channelzSessionInfo.messagesSent += 1; + channelzSessionInfo.lastMessageSentTimestamp = new Date(); + } + }, + addMessageReceived: () => { + if (channelzSessionInfo) { + channelzSessionInfo.messagesReceived += 1; + channelzSessionInfo.lastMessageReceivedTimestamp = new Date(); + } + }, + onCallEnd: status => { + if (status.code === constants_1.Status.OK) { + this.callTracker.addCallSucceeded(); + } + else { + this.callTracker.addCallFailed(); + } + }, + onStreamEnd: success => { + if (channelzSessionInfo) { + if (success) { + channelzSessionInfo.streamTracker.addCallSucceeded(); + } + else { + channelzSessionInfo.streamTracker.addCallFailed(); + } + } + }, + }; + const call = (0, server_interceptors_1.getServerInterceptingCall)([...extraInterceptors, ...this.interceptors], stream, headers, callEventTracker, handler, this.options); + if (!this._runHandlerForCall(call, handler)) { + this.callTracker.addCallFailed(); + channelzSessionInfo === null || channelzSessionInfo === void 0 ? void 0 : channelzSessionInfo.streamTracker.addCallFailed(); + call.sendStatus({ + code: constants_1.Status.INTERNAL, + details: `Unknown handler type: ${handler.type}`, + }); + } + } + _streamHandler(extraInterceptors, stream, headers) { + // for handling idle timeout + this.onStreamOpened(stream); + if (this._verifyContentType(stream, headers) !== true) { + return; + } + const path = headers[HTTP2_HEADER_PATH]; + const handler = this._retrieveHandler(path); + if (!handler) { + this._respondWithError(getUnimplementedStatusResponse(path), stream, null); + return; + } + const call = (0, server_interceptors_1.getServerInterceptingCall)([...extraInterceptors, ...this.interceptors], stream, headers, null, handler, this.options); + if (!this._runHandlerForCall(call, handler)) { + call.sendStatus({ + code: constants_1.Status.INTERNAL, + details: `Unknown handler type: ${handler.type}`, + }); + } + } + _runHandlerForCall(call, handler) { + const { type } = handler; + if (type === 'unary') { + handleUnary(call, handler); + } + else if (type === 'clientStream') { + handleClientStreaming(call, handler); + } + else if (type === 'serverStream') { + handleServerStreaming(call, handler); + } + else if (type === 'bidi') { + handleBidiStreaming(call, handler); + } + else { + return false; + } + return true; + } + _setupHandlers(http2Server, extraInterceptors) { + if (http2Server === null) { + return; + } + const serverAddress = http2Server.address(); + let serverAddressString = 'null'; + if (serverAddress) { + if (typeof serverAddress === 'string') { + serverAddressString = serverAddress; + } + else { + serverAddressString = serverAddress.address + ':' + serverAddress.port; + } + } + this.serverAddressString = serverAddressString; + const handler = this.channelzEnabled + ? this._channelzHandler + : this._streamHandler; + const sessionHandler = this.channelzEnabled + ? 
this._channelzSessionHandler(http2Server) + : this._sessionHandler(http2Server); + http2Server.on('stream', handler.bind(this, extraInterceptors)); + http2Server.on('session', sessionHandler); + } + _sessionHandler(http2Server) { + return (session) => { + var _b, _c; + (_b = this.http2Servers.get(http2Server)) === null || _b === void 0 ? void 0 : _b.sessions.add(session); + let connectionAgeTimer = null; + let connectionAgeGraceTimer = null; + let keepaliveTimer = null; + let sessionClosedByServer = false; + const idleTimeoutObj = this.enableIdleTimeout(session); + if (this.maxConnectionAgeMs !== UNLIMITED_CONNECTION_AGE_MS) { + // Apply a random jitter within a +/-10% range + const jitterMagnitude = this.maxConnectionAgeMs / 10; + const jitter = Math.random() * jitterMagnitude * 2 - jitterMagnitude; + connectionAgeTimer = setTimeout(() => { + var _b, _c; + sessionClosedByServer = true; + this.trace('Connection dropped by max connection age: ' + + ((_b = session.socket) === null || _b === void 0 ? void 0 : _b.remoteAddress)); + try { + session.goaway(http2.constants.NGHTTP2_NO_ERROR, ~(1 << 31), kMaxAge); + } + catch (e) { + // The goaway can't be sent because the session is already closed + session.destroy(); + return; + } + session.close(); + /* Allow a grace period after sending the GOAWAY before forcibly + * closing the connection. */ + if (this.maxConnectionAgeGraceMs !== UNLIMITED_CONNECTION_AGE_MS) { + connectionAgeGraceTimer = setTimeout(() => { + session.destroy(); + }, this.maxConnectionAgeGraceMs); + (_c = connectionAgeGraceTimer.unref) === null || _c === void 0 ? void 0 : _c.call(connectionAgeGraceTimer); + } + }, this.maxConnectionAgeMs + jitter); + (_c = connectionAgeTimer.unref) === null || _c === void 0 ? void 0 : _c.call(connectionAgeTimer); + } + const clearKeepaliveTimeout = () => { + if (keepaliveTimer) { + clearTimeout(keepaliveTimer); + keepaliveTimer = null; + } + }; + const canSendPing = () => { + return (!session.destroyed && + this.keepaliveTimeMs < KEEPALIVE_MAX_TIME_MS && + this.keepaliveTimeMs > 0); + }; + /* eslint-disable-next-line prefer-const */ + let sendPing; // hoisted for use in maybeStartKeepalivePingTimer + const maybeStartKeepalivePingTimer = () => { + var _b; + if (!canSendPing()) { + return; + } + this.keepaliveTrace('Starting keepalive timer for ' + this.keepaliveTimeMs + 'ms'); + keepaliveTimer = setTimeout(() => { + clearKeepaliveTimeout(); + sendPing(); + }, this.keepaliveTimeMs); + (_b = keepaliveTimer.unref) === null || _b === void 0 ? void 0 : _b.call(keepaliveTimer); + }; + sendPing = () => { + var _b; + if (!canSendPing()) { + return; + } + this.keepaliveTrace('Sending ping with timeout ' + this.keepaliveTimeoutMs + 'ms'); + let pingSendError = ''; + try { + const pingSentSuccessfully = session.ping((err, duration, payload) => { + clearKeepaliveTimeout(); + if (err) { + this.keepaliveTrace('Ping failed with error: ' + err.message); + sessionClosedByServer = true; + session.close(); + } + else { + this.keepaliveTrace('Received ping response'); + maybeStartKeepalivePingTimer(); + } + }); + if (!pingSentSuccessfully) { + pingSendError = 'Ping returned false'; + } + } + catch (e) { + // grpc/grpc-node#2139 + pingSendError = + (e instanceof Error ? 
e.message : '') || 'Unknown error'; + } + if (pingSendError) { + this.keepaliveTrace('Ping send failed: ' + pingSendError); + this.trace('Connection dropped due to ping send error: ' + pingSendError); + sessionClosedByServer = true; + session.close(); + return; + } + keepaliveTimer = setTimeout(() => { + clearKeepaliveTimeout(); + this.keepaliveTrace('Ping timeout passed without response'); + this.trace('Connection dropped by keepalive timeout'); + sessionClosedByServer = true; + session.close(); + }, this.keepaliveTimeoutMs); + (_b = keepaliveTimer.unref) === null || _b === void 0 ? void 0 : _b.call(keepaliveTimer); + }; + maybeStartKeepalivePingTimer(); + session.on('close', () => { + var _b, _c; + if (!sessionClosedByServer) { + this.trace(`Connection dropped by client ${(_b = session.socket) === null || _b === void 0 ? void 0 : _b.remoteAddress}`); + } + if (connectionAgeTimer) { + clearTimeout(connectionAgeTimer); + } + if (connectionAgeGraceTimer) { + clearTimeout(connectionAgeGraceTimer); + } + clearKeepaliveTimeout(); + if (idleTimeoutObj !== null) { + clearTimeout(idleTimeoutObj.timeout); + this.sessionIdleTimeouts.delete(session); + } + (_c = this.http2Servers.get(http2Server)) === null || _c === void 0 ? void 0 : _c.sessions.delete(session); + }); + }; + } + _channelzSessionHandler(http2Server) { + return (session) => { + var _b, _c, _d, _e; + const channelzRef = (0, channelz_1.registerChannelzSocket)((_c = (_b = session.socket) === null || _b === void 0 ? void 0 : _b.remoteAddress) !== null && _c !== void 0 ? _c : 'unknown', this.getChannelzSessionInfo.bind(this, session), this.channelzEnabled); + const channelzSessionInfo = { + ref: channelzRef, + streamTracker: new channelz_1.ChannelzCallTracker(), + messagesSent: 0, + messagesReceived: 0, + keepAlivesSent: 0, + lastMessageSentTimestamp: null, + lastMessageReceivedTimestamp: null, + }; + (_d = this.http2Servers.get(http2Server)) === null || _d === void 0 ? void 0 : _d.sessions.add(session); + this.sessions.set(session, channelzSessionInfo); + const clientAddress = `${session.socket.remoteAddress}:${session.socket.remotePort}`; + this.channelzTrace.addTrace('CT_INFO', 'Connection established by client ' + clientAddress); + this.trace('Connection established by client ' + clientAddress); + this.sessionChildrenTracker.refChild(channelzRef); + let connectionAgeTimer = null; + let connectionAgeGraceTimer = null; + let keepaliveTimeout = null; + let sessionClosedByServer = false; + const idleTimeoutObj = this.enableIdleTimeout(session); + if (this.maxConnectionAgeMs !== UNLIMITED_CONNECTION_AGE_MS) { + // Apply a random jitter within a +/-10% range + const jitterMagnitude = this.maxConnectionAgeMs / 10; + const jitter = Math.random() * jitterMagnitude * 2 - jitterMagnitude; + connectionAgeTimer = setTimeout(() => { + var _b; + sessionClosedByServer = true; + this.channelzTrace.addTrace('CT_INFO', 'Connection dropped by max connection age from ' + clientAddress); + try { + session.goaway(http2.constants.NGHTTP2_NO_ERROR, ~(1 << 31), kMaxAge); + } + catch (e) { + // The goaway can't be sent because the session is already closed + session.destroy(); + return; + } + session.close(); + /* Allow a grace period after sending the GOAWAY before forcibly + * closing the connection. */ + if (this.maxConnectionAgeGraceMs !== UNLIMITED_CONNECTION_AGE_MS) { + connectionAgeGraceTimer = setTimeout(() => { + session.destroy(); + }, this.maxConnectionAgeGraceMs); + (_b = connectionAgeGraceTimer.unref) === null || _b === void 0 ? 
void 0 : _b.call(connectionAgeGraceTimer); + } + }, this.maxConnectionAgeMs + jitter); + (_e = connectionAgeTimer.unref) === null || _e === void 0 ? void 0 : _e.call(connectionAgeTimer); + } + const clearKeepaliveTimeout = () => { + if (keepaliveTimeout) { + clearTimeout(keepaliveTimeout); + keepaliveTimeout = null; + } + }; + const canSendPing = () => { + return (!session.destroyed && + this.keepaliveTimeMs < KEEPALIVE_MAX_TIME_MS && + this.keepaliveTimeMs > 0); + }; + /* eslint-disable-next-line prefer-const */ + let sendPing; // hoisted for use in maybeStartKeepalivePingTimer + const maybeStartKeepalivePingTimer = () => { + var _b; + if (!canSendPing()) { + return; + } + this.keepaliveTrace('Starting keepalive timer for ' + this.keepaliveTimeMs + 'ms'); + keepaliveTimeout = setTimeout(() => { + clearKeepaliveTimeout(); + sendPing(); + }, this.keepaliveTimeMs); + (_b = keepaliveTimeout.unref) === null || _b === void 0 ? void 0 : _b.call(keepaliveTimeout); + }; + sendPing = () => { + var _b; + if (!canSendPing()) { + return; + } + this.keepaliveTrace('Sending ping with timeout ' + this.keepaliveTimeoutMs + 'ms'); + let pingSendError = ''; + try { + const pingSentSuccessfully = session.ping((err, duration, payload) => { + clearKeepaliveTimeout(); + if (err) { + this.keepaliveTrace('Ping failed with error: ' + err.message); + this.channelzTrace.addTrace('CT_INFO', 'Connection dropped due to error of a ping frame ' + + err.message + + ' return in ' + + duration); + sessionClosedByServer = true; + session.close(); + } + else { + this.keepaliveTrace('Received ping response'); + maybeStartKeepalivePingTimer(); + } + }); + if (!pingSentSuccessfully) { + pingSendError = 'Ping returned false'; + } + } + catch (e) { + // grpc/grpc-node#2139 + pingSendError = + (e instanceof Error ? e.message : '') || 'Unknown error'; + } + if (pingSendError) { + this.keepaliveTrace('Ping send failed: ' + pingSendError); + this.channelzTrace.addTrace('CT_INFO', 'Connection dropped due to ping send error: ' + pingSendError); + sessionClosedByServer = true; + session.close(); + return; + } + channelzSessionInfo.keepAlivesSent += 1; + keepaliveTimeout = setTimeout(() => { + clearKeepaliveTimeout(); + this.keepaliveTrace('Ping timeout passed without response'); + this.channelzTrace.addTrace('CT_INFO', 'Connection dropped by keepalive timeout from ' + clientAddress); + sessionClosedByServer = true; + session.close(); + }, this.keepaliveTimeoutMs); + (_b = keepaliveTimeout.unref) === null || _b === void 0 ? void 0 : _b.call(keepaliveTimeout); + }; + maybeStartKeepalivePingTimer(); + session.on('close', () => { + var _b; + if (!sessionClosedByServer) { + this.channelzTrace.addTrace('CT_INFO', 'Connection dropped by client ' + clientAddress); + } + this.sessionChildrenTracker.unrefChild(channelzRef); + (0, channelz_1.unregisterChannelzRef)(channelzRef); + if (connectionAgeTimer) { + clearTimeout(connectionAgeTimer); + } + if (connectionAgeGraceTimer) { + clearTimeout(connectionAgeGraceTimer); + } + clearKeepaliveTimeout(); + if (idleTimeoutObj !== null) { + clearTimeout(idleTimeoutObj.timeout); + this.sessionIdleTimeouts.delete(session); + } + (_b = this.http2Servers.get(http2Server)) === null || _b === void 0 ? 
void 0 : _b.sessions.delete(session); + this.sessions.delete(session); + }); + }; + } + enableIdleTimeout(session) { + var _b, _c; + if (this.sessionIdleTimeout >= MAX_CONNECTION_IDLE_MS) { + return null; + } + const idleTimeoutObj = { + activeStreams: 0, + lastIdle: Date.now(), + onClose: this.onStreamClose.bind(this, session), + timeout: setTimeout(this.onIdleTimeout, this.sessionIdleTimeout, this, session), + }; + (_c = (_b = idleTimeoutObj.timeout).unref) === null || _c === void 0 ? void 0 : _c.call(_b); + this.sessionIdleTimeouts.set(session, idleTimeoutObj); + const { socket } = session; + this.trace('Enable idle timeout for ' + + socket.remoteAddress + + ':' + + socket.remotePort); + return idleTimeoutObj; + } + onIdleTimeout(ctx, session) { + const { socket } = session; + const sessionInfo = ctx.sessionIdleTimeouts.get(session); + // if it is called while we have activeStreams - timer will not be rescheduled + // until last active stream is closed, then it will call .refresh() on the timer + // important part is to not clearTimeout(timer) or it becomes unusable + // for future refreshes + if (sessionInfo !== undefined && + sessionInfo.activeStreams === 0) { + if (Date.now() - sessionInfo.lastIdle >= ctx.sessionIdleTimeout) { + ctx.trace('Session idle timeout triggered for ' + + (socket === null || socket === void 0 ? void 0 : socket.remoteAddress) + + ':' + + (socket === null || socket === void 0 ? void 0 : socket.remotePort) + + ' last idle at ' + + sessionInfo.lastIdle); + ctx.closeSession(session); + } + else { + sessionInfo.timeout.refresh(); + } + } + } + onStreamOpened(stream) { + const session = stream.session; + const idleTimeoutObj = this.sessionIdleTimeouts.get(session); + if (idleTimeoutObj) { + idleTimeoutObj.activeStreams += 1; + stream.once('close', idleTimeoutObj.onClose); + } + } + onStreamClose(session) { + var _b, _c; + const idleTimeoutObj = this.sessionIdleTimeouts.get(session); + if (idleTimeoutObj) { + idleTimeoutObj.activeStreams -= 1; + if (idleTimeoutObj.activeStreams === 0) { + idleTimeoutObj.lastIdle = Date.now(); + idleTimeoutObj.timeout.refresh(); + this.trace('Session onStreamClose' + + ((_b = session.socket) === null || _b === void 0 ? void 0 : _b.remoteAddress) + + ':' + + ((_c = session.socket) === null || _c === void 0 ? void 0 : _c.remotePort) + + ' at ' + + idleTimeoutObj.lastIdle); + } + } + } + }, + (() => { + const _metadata = typeof Symbol === "function" && Symbol.metadata ? Object.create(null) : void 0; + _start_decorators = [deprecate('Calling start() is no longer necessary. It can be safely omitted.')]; + __esDecorate(_a, null, _start_decorators, { kind: "method", name: "start", static: false, private: false, access: { has: obj => "start" in obj, get: obj => obj.start }, metadata: _metadata }, null, _instanceExtraInitializers); + if (_metadata) Object.defineProperty(_a, Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata }); + })(), + _a; +})(); +exports.Server = Server; +async function handleUnary(call, handler) { + let stream; + function respond(err, value, trailer, flags) { + if (err) { + call.sendStatus((0, server_call_1.serverErrorToStatus)(err, trailer)); + return; + } + call.sendMessage(value, () => { + call.sendStatus({ + code: constants_1.Status.OK, + details: 'OK', + metadata: trailer !== null && trailer !== void 0 ? 
trailer : null, + }); + }); + } + let requestMetadata; + let requestMessage = null; + call.start({ + onReceiveMetadata(metadata) { + requestMetadata = metadata; + call.startRead(); + }, + onReceiveMessage(message) { + if (requestMessage) { + call.sendStatus({ + code: constants_1.Status.UNIMPLEMENTED, + details: `Received a second request message for server streaming method ${handler.path}`, + metadata: null, + }); + return; + } + requestMessage = message; + call.startRead(); + }, + onReceiveHalfClose() { + if (!requestMessage) { + call.sendStatus({ + code: constants_1.Status.UNIMPLEMENTED, + details: `Received no request message for server streaming method ${handler.path}`, + metadata: null, + }); + return; + } + stream = new server_call_1.ServerWritableStreamImpl(handler.path, call, requestMetadata, requestMessage); + try { + handler.func(stream, respond); + } + catch (err) { + call.sendStatus({ + code: constants_1.Status.UNKNOWN, + details: `Server method handler threw error ${err.message}`, + metadata: null, + }); + } + }, + onCancel() { + if (stream) { + stream.cancelled = true; + stream.emit('cancelled', 'cancelled'); + } + }, + }); +} +function handleClientStreaming(call, handler) { + let stream; + function respond(err, value, trailer, flags) { + if (err) { + call.sendStatus((0, server_call_1.serverErrorToStatus)(err, trailer)); + return; + } + call.sendMessage(value, () => { + call.sendStatus({ + code: constants_1.Status.OK, + details: 'OK', + metadata: trailer !== null && trailer !== void 0 ? trailer : null, + }); + }); + } + call.start({ + onReceiveMetadata(metadata) { + stream = new server_call_1.ServerDuplexStreamImpl(handler.path, call, metadata); + try { + handler.func(stream, respond); + } + catch (err) { + call.sendStatus({ + code: constants_1.Status.UNKNOWN, + details: `Server method handler threw error ${err.message}`, + metadata: null, + }); + } + }, + onReceiveMessage(message) { + stream.push(message); + }, + onReceiveHalfClose() { + stream.push(null); + }, + onCancel() { + if (stream) { + stream.cancelled = true; + stream.emit('cancelled', 'cancelled'); + stream.destroy(); + } + }, + }); +} +function handleServerStreaming(call, handler) { + let stream; + let requestMetadata; + let requestMessage = null; + call.start({ + onReceiveMetadata(metadata) { + requestMetadata = metadata; + call.startRead(); + }, + onReceiveMessage(message) { + if (requestMessage) { + call.sendStatus({ + code: constants_1.Status.UNIMPLEMENTED, + details: `Received a second request message for server streaming method ${handler.path}`, + metadata: null, + }); + return; + } + requestMessage = message; + call.startRead(); + }, + onReceiveHalfClose() { + if (!requestMessage) { + call.sendStatus({ + code: constants_1.Status.UNIMPLEMENTED, + details: `Received no request message for server streaming method ${handler.path}`, + metadata: null, + }); + return; + } + stream = new server_call_1.ServerWritableStreamImpl(handler.path, call, requestMetadata, requestMessage); + try { + handler.func(stream); + } + catch (err) { + call.sendStatus({ + code: constants_1.Status.UNKNOWN, + details: `Server method handler threw error ${err.message}`, + metadata: null, + }); + } + }, + onCancel() { + if (stream) { + stream.cancelled = true; + stream.emit('cancelled', 'cancelled'); + stream.destroy(); + } + }, + }); +} +function handleBidiStreaming(call, handler) { + let stream; + call.start({ + onReceiveMetadata(metadata) { + stream = new server_call_1.ServerDuplexStreamImpl(handler.path, call, metadata); + try { + 
handler.func(stream); + } + catch (err) { + call.sendStatus({ + code: constants_1.Status.UNKNOWN, + details: `Server method handler threw error ${err.message}`, + metadata: null, + }); + } + }, + onReceiveMessage(message) { + stream.push(message); + }, + onReceiveHalfClose() { + stream.push(null); + }, + onCancel() { + if (stream) { + stream.cancelled = true; + stream.emit('cancelled', 'cancelled'); + stream.destroy(); + } + }, + }); +} +//# sourceMappingURL=server.js.map + +/***/ }), + +/***/ 21761: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.extractAndSelectServiceConfig = exports.validateServiceConfig = exports.validateRetryThrottling = void 0; +/* This file implements gRFC A2 and the service config spec: + * https://github.com/grpc/proposal/blob/master/A2-service-configs-in-dns.md + * https://github.com/grpc/grpc/blob/master/doc/service_config.md. Each + * function here takes an object with unknown structure and returns its + * specific object type if the input has the right structure, and throws an + * error otherwise. */ +/* The any type is purposely used here. All functions validate their input at + * runtime */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +const os = __nccwpck_require__(22037); +const constants_1 = __nccwpck_require__(90634); +/** + * Recognizes a number with up to 9 digits after the decimal point, followed by + * an "s", representing a number of seconds. + */ +const DURATION_REGEX = /^\d+(\.\d{1,9})?s$/; +/** + * Client language name used for determining whether this client matches a + * `ServiceConfigCanaryConfig`'s `clientLanguage` list. 
+ */ +const CLIENT_LANGUAGE_STRING = 'node'; +function validateName(obj) { + // In this context, and unset field and '' are considered the same + if ('service' in obj && obj.service !== '') { + if (typeof obj.service !== 'string') { + throw new Error(`Invalid method config name: invalid service: expected type string, got ${typeof obj.service}`); + } + if ('method' in obj && obj.method !== '') { + if (typeof obj.method !== 'string') { + throw new Error(`Invalid method config name: invalid method: expected type string, got ${typeof obj.service}`); + } + return { + service: obj.service, + method: obj.method, + }; + } + else { + return { + service: obj.service, + }; + } + } + else { + if ('method' in obj && obj.method !== undefined) { + throw new Error(`Invalid method config name: method set with empty or unset service`); + } + return {}; + } +} +function validateRetryPolicy(obj) { + if (!('maxAttempts' in obj) || + !Number.isInteger(obj.maxAttempts) || + obj.maxAttempts < 2) { + throw new Error('Invalid method config retry policy: maxAttempts must be an integer at least 2'); + } + if (!('initialBackoff' in obj) || + typeof obj.initialBackoff !== 'string' || + !DURATION_REGEX.test(obj.initialBackoff)) { + throw new Error('Invalid method config retry policy: initialBackoff must be a string consisting of a positive integer or decimal followed by s'); + } + if (!('maxBackoff' in obj) || + typeof obj.maxBackoff !== 'string' || + !DURATION_REGEX.test(obj.maxBackoff)) { + throw new Error('Invalid method config retry policy: maxBackoff must be a string consisting of a positive integer or decimal followed by s'); + } + if (!('backoffMultiplier' in obj) || + typeof obj.backoffMultiplier !== 'number' || + obj.backoffMultiplier <= 0) { + throw new Error('Invalid method config retry policy: backoffMultiplier must be a number greater than 0'); + } + if (!('retryableStatusCodes' in obj && Array.isArray(obj.retryableStatusCodes))) { + throw new Error('Invalid method config retry policy: retryableStatusCodes is required'); + } + if (obj.retryableStatusCodes.length === 0) { + throw new Error('Invalid method config retry policy: retryableStatusCodes must be non-empty'); + } + for (const value of obj.retryableStatusCodes) { + if (typeof value === 'number') { + if (!Object.values(constants_1.Status).includes(value)) { + throw new Error('Invalid method config retry policy: retryableStatusCodes value not in status code range'); + } + } + else if (typeof value === 'string') { + if (!Object.values(constants_1.Status).includes(value.toUpperCase())) { + throw new Error('Invalid method config retry policy: retryableStatusCodes value not a status code name'); + } + } + else { + throw new Error('Invalid method config retry policy: retryableStatusCodes value must be a string or number'); + } + } + return { + maxAttempts: obj.maxAttempts, + initialBackoff: obj.initialBackoff, + maxBackoff: obj.maxBackoff, + backoffMultiplier: obj.backoffMultiplier, + retryableStatusCodes: obj.retryableStatusCodes, + }; +} +function validateHedgingPolicy(obj) { + if (!('maxAttempts' in obj) || + !Number.isInteger(obj.maxAttempts) || + obj.maxAttempts < 2) { + throw new Error('Invalid method config hedging policy: maxAttempts must be an integer at least 2'); + } + if ('hedgingDelay' in obj && + (typeof obj.hedgingDelay !== 'string' || + !DURATION_REGEX.test(obj.hedgingDelay))) { + throw new Error('Invalid method config hedging policy: hedgingDelay must be a string consisting of a positive integer followed by s'); + } + if 
('nonFatalStatusCodes' in obj && Array.isArray(obj.nonFatalStatusCodes)) { + for (const value of obj.nonFatalStatusCodes) { + if (typeof value === 'number') { + if (!Object.values(constants_1.Status).includes(value)) { + throw new Error('Invalid method config hedging policy: nonFatalStatusCodes value not in status code range'); + } + } + else if (typeof value === 'string') { + if (!Object.values(constants_1.Status).includes(value.toUpperCase())) { + throw new Error('Invalid method config hedging policy: nonFatalStatusCodes value not a status code name'); + } + } + else { + throw new Error('Invalid method config hedging policy: nonFatalStatusCodes value must be a string or number'); + } + } + } + const result = { + maxAttempts: obj.maxAttempts, + }; + if (obj.hedgingDelay) { + result.hedgingDelay = obj.hedgingDelay; + } + if (obj.nonFatalStatusCodes) { + result.nonFatalStatusCodes = obj.nonFatalStatusCodes; + } + return result; +} +function validateMethodConfig(obj) { + var _a; + const result = { + name: [], + }; + if (!('name' in obj) || !Array.isArray(obj.name)) { + throw new Error('Invalid method config: invalid name array'); + } + for (const name of obj.name) { + result.name.push(validateName(name)); + } + if ('waitForReady' in obj) { + if (typeof obj.waitForReady !== 'boolean') { + throw new Error('Invalid method config: invalid waitForReady'); + } + result.waitForReady = obj.waitForReady; + } + if ('timeout' in obj) { + if (typeof obj.timeout === 'object') { + if (!('seconds' in obj.timeout) || + !(typeof obj.timeout.seconds === 'number')) { + throw new Error('Invalid method config: invalid timeout.seconds'); + } + if (!('nanos' in obj.timeout) || + !(typeof obj.timeout.nanos === 'number')) { + throw new Error('Invalid method config: invalid timeout.nanos'); + } + result.timeout = obj.timeout; + } + else if (typeof obj.timeout === 'string' && + DURATION_REGEX.test(obj.timeout)) { + const timeoutParts = obj.timeout + .substring(0, obj.timeout.length - 1) + .split('.'); + result.timeout = { + seconds: timeoutParts[0] | 0, + nanos: ((_a = timeoutParts[1]) !== null && _a !== void 0 ? 
_a : 0) | 0, + }; + } + else { + throw new Error('Invalid method config: invalid timeout'); + } + } + if ('maxRequestBytes' in obj) { + if (typeof obj.maxRequestBytes !== 'number') { + throw new Error('Invalid method config: invalid maxRequestBytes'); + } + result.maxRequestBytes = obj.maxRequestBytes; + } + if ('maxResponseBytes' in obj) { + if (typeof obj.maxResponseBytes !== 'number') { + throw new Error('Invalid method config: invalid maxRequestBytes'); + } + result.maxResponseBytes = obj.maxResponseBytes; + } + if ('retryPolicy' in obj) { + if ('hedgingPolicy' in obj) { + throw new Error('Invalid method config: retryPolicy and hedgingPolicy cannot both be specified'); + } + else { + result.retryPolicy = validateRetryPolicy(obj.retryPolicy); + } + } + else if ('hedgingPolicy' in obj) { + result.hedgingPolicy = validateHedgingPolicy(obj.hedgingPolicy); + } + return result; +} +function validateRetryThrottling(obj) { + if (!('maxTokens' in obj) || + typeof obj.maxTokens !== 'number' || + obj.maxTokens <= 0 || + obj.maxTokens > 1000) { + throw new Error('Invalid retryThrottling: maxTokens must be a number in (0, 1000]'); + } + if (!('tokenRatio' in obj) || + typeof obj.tokenRatio !== 'number' || + obj.tokenRatio <= 0) { + throw new Error('Invalid retryThrottling: tokenRatio must be a number greater than 0'); + } + return { + maxTokens: +obj.maxTokens.toFixed(3), + tokenRatio: +obj.tokenRatio.toFixed(3), + }; +} +exports.validateRetryThrottling = validateRetryThrottling; +function validateLoadBalancingConfig(obj) { + if (!(typeof obj === 'object' && obj !== null)) { + throw new Error(`Invalid loadBalancingConfig: unexpected type ${typeof obj}`); + } + const keys = Object.keys(obj); + if (keys.length > 1) { + throw new Error(`Invalid loadBalancingConfig: unexpected multiple keys ${keys}`); + } + if (keys.length === 0) { + throw new Error('Invalid loadBalancingConfig: load balancing policy name required'); + } + return { + [keys[0]]: obj[keys[0]], + }; +} +function validateServiceConfig(obj) { + const result = { + loadBalancingConfig: [], + methodConfig: [], + }; + if ('loadBalancingPolicy' in obj) { + if (typeof obj.loadBalancingPolicy === 'string') { + result.loadBalancingPolicy = obj.loadBalancingPolicy; + } + else { + throw new Error('Invalid service config: invalid loadBalancingPolicy'); + } + } + if ('loadBalancingConfig' in obj) { + if (Array.isArray(obj.loadBalancingConfig)) { + for (const config of obj.loadBalancingConfig) { + result.loadBalancingConfig.push(validateLoadBalancingConfig(config)); + } + } + else { + throw new Error('Invalid service config: invalid loadBalancingConfig'); + } + } + if ('methodConfig' in obj) { + if (Array.isArray(obj.methodConfig)) { + for (const methodConfig of obj.methodConfig) { + result.methodConfig.push(validateMethodConfig(methodConfig)); + } + } + } + if ('retryThrottling' in obj) { + result.retryThrottling = validateRetryThrottling(obj.retryThrottling); + } + // Validate method name uniqueness + const seenMethodNames = []; + for (const methodConfig of result.methodConfig) { + for (const name of methodConfig.name) { + for (const seenName of seenMethodNames) { + if (name.service === seenName.service && + name.method === seenName.method) { + throw new Error(`Invalid service config: duplicate name ${name.service}/${name.method}`); + } + } + seenMethodNames.push(name); + } + } + return result; +} +exports.validateServiceConfig = validateServiceConfig; +function validateCanaryConfig(obj) { + if (!('serviceConfig' in obj)) { + throw new Error('Invalid 
service config choice: missing service config'); + } + const result = { + serviceConfig: validateServiceConfig(obj.serviceConfig), + }; + if ('clientLanguage' in obj) { + if (Array.isArray(obj.clientLanguage)) { + result.clientLanguage = []; + for (const lang of obj.clientLanguage) { + if (typeof lang === 'string') { + result.clientLanguage.push(lang); + } + else { + throw new Error('Invalid service config choice: invalid clientLanguage'); + } + } + } + else { + throw new Error('Invalid service config choice: invalid clientLanguage'); + } + } + if ('clientHostname' in obj) { + if (Array.isArray(obj.clientHostname)) { + result.clientHostname = []; + for (const lang of obj.clientHostname) { + if (typeof lang === 'string') { + result.clientHostname.push(lang); + } + else { + throw new Error('Invalid service config choice: invalid clientHostname'); + } + } + } + else { + throw new Error('Invalid service config choice: invalid clientHostname'); + } + } + if ('percentage' in obj) { + if (typeof obj.percentage === 'number' && + 0 <= obj.percentage && + obj.percentage <= 100) { + result.percentage = obj.percentage; + } + else { + throw new Error('Invalid service config choice: invalid percentage'); + } + } + // Validate that no unexpected fields are present + const allowedFields = [ + 'clientLanguage', + 'percentage', + 'clientHostname', + 'serviceConfig', + ]; + for (const field in obj) { + if (!allowedFields.includes(field)) { + throw new Error(`Invalid service config choice: unexpected field ${field}`); + } + } + return result; +} +function validateAndSelectCanaryConfig(obj, percentage) { + if (!Array.isArray(obj)) { + throw new Error('Invalid service config list'); + } + for (const config of obj) { + const validatedConfig = validateCanaryConfig(config); + /* For each field, we check if it is present, then only discard the + * config if the field value does not match the current client */ + if (typeof validatedConfig.percentage === 'number' && + percentage > validatedConfig.percentage) { + continue; + } + if (Array.isArray(validatedConfig.clientHostname)) { + let hostnameMatched = false; + for (const hostname of validatedConfig.clientHostname) { + if (hostname === os.hostname()) { + hostnameMatched = true; + } + } + if (!hostnameMatched) { + continue; + } + } + if (Array.isArray(validatedConfig.clientLanguage)) { + let languageMatched = false; + for (const language of validatedConfig.clientLanguage) { + if (language === CLIENT_LANGUAGE_STRING) { + languageMatched = true; + } + } + if (!languageMatched) { + continue; + } + } + return validatedConfig.serviceConfig; + } + throw new Error('No matching service config found'); +} +/** + * Find the "grpc_config" record among the TXT records, parse its value as JSON, validate its contents, + * and select a service config with selection fields that all match this client. Most of these steps + * can fail with an error; the caller must handle any errors thrown this way. + * @param txtRecord The TXT record array that is output from a successful call to dns.resolveTxt + * @param percentage A number chosen from the range [0, 100) that is used to select which config to use + * @return The service configuration to use, given the percentage value, or null if the service config + * data has a valid format but none of the options match the current client. 
+ */ +function extractAndSelectServiceConfig(txtRecord, percentage) { + for (const record of txtRecord) { + if (record.length > 0 && record[0].startsWith('grpc_config=')) { + /* Treat the list of strings in this record as a single string and remove + * "grpc_config=" from the beginning. The rest should be a JSON string */ + const recordString = record.join('').substring('grpc_config='.length); + const recordJson = JSON.parse(recordString); + return validateAndSelectCanaryConfig(recordJson, percentage); + } + } + return null; +} +exports.extractAndSelectServiceConfig = extractAndSelectServiceConfig; +//# sourceMappingURL=service-config.js.map + +/***/ }), + +/***/ 73155: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StatusBuilder = void 0; +/** + * A builder for gRPC status objects. + */ +class StatusBuilder { + constructor() { + this.code = null; + this.details = null; + this.metadata = null; + } + /** + * Adds a status code to the builder. + */ + withCode(code) { + this.code = code; + return this; + } + /** + * Adds details to the builder. + */ + withDetails(details) { + this.details = details; + return this; + } + /** + * Adds metadata to the builder. + */ + withMetadata(metadata) { + this.metadata = metadata; + return this; + } + /** + * Builds the status object. + */ + build() { + const status = {}; + if (this.code !== null) { + status.code = this.code; + } + if (this.details !== null) { + status.details = this.details; + } + if (this.metadata !== null) { + status.metadata = this.metadata; + } + return status; + } +} +exports.StatusBuilder = StatusBuilder; +//# sourceMappingURL=status-builder.js.map + +/***/ }), + +/***/ 16575: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StreamDecoder = void 0; +var ReadState; +(function (ReadState) { + ReadState[ReadState["NO_DATA"] = 0] = "NO_DATA"; + ReadState[ReadState["READING_SIZE"] = 1] = "READING_SIZE"; + ReadState[ReadState["READING_MESSAGE"] = 2] = "READING_MESSAGE"; +})(ReadState || (ReadState = {})); +class StreamDecoder { + constructor(maxReadMessageLength) { + this.maxReadMessageLength = maxReadMessageLength; + this.readState = ReadState.NO_DATA; + this.readCompressFlag = Buffer.alloc(1); + this.readPartialSize = Buffer.alloc(4); + this.readSizeRemaining = 4; + this.readMessageSize = 0; + this.readPartialMessage = []; + this.readMessageRemaining = 0; + } + write(data) { + let readHead = 0; + let toRead; + const result = []; + while (readHead < data.length) { + switch (this.readState) { + case ReadState.NO_DATA: + this.readCompressFlag = data.slice(readHead, readHead + 1); + readHead += 1; + this.readState = ReadState.READING_SIZE; + this.readPartialSize.fill(0); + this.readSizeRemaining = 4; + this.readMessageSize = 0; + this.readMessageRemaining = 0; + this.readPartialMessage = []; + break; + case ReadState.READING_SIZE: + toRead = Math.min(data.length - readHead, this.readSizeRemaining); + data.copy(this.readPartialSize, 4 - this.readSizeRemaining, readHead, readHead + toRead); + this.readSizeRemaining -= toRead; + readHead += toRead; + // readSizeRemaining >=0 here + if (this.readSizeRemaining === 0) { + this.readMessageSize = this.readPartialSize.readUInt32BE(0); + if (this.maxReadMessageLength !== -1 && this.readMessageSize > this.maxReadMessageLength) { + throw new Error(`Received message larger than max (${this.readMessageSize} vs ${this.maxReadMessageLength})`); + } + this.readMessageRemaining = this.readMessageSize; + if (this.readMessageRemaining > 0) { + this.readState = ReadState.READING_MESSAGE; + } + else { + const message = Buffer.concat([this.readCompressFlag, this.readPartialSize], 5); + this.readState = ReadState.NO_DATA; + result.push(message); + } + } + break; + case ReadState.READING_MESSAGE: + toRead = Math.min(data.length - readHead, this.readMessageRemaining); + this.readPartialMessage.push(data.slice(readHead, readHead + toRead)); + this.readMessageRemaining -= toRead; + readHead += toRead; + // readMessageRemaining >=0 here + if (this.readMessageRemaining === 0) { + // At this point, we have read a full message + const framedMessageBuffers = [ + this.readCompressFlag, + this.readPartialSize, + ].concat(this.readPartialMessage); + const framedMessage = Buffer.concat(framedMessageBuffers, this.readMessageSize + 5); + this.readState = ReadState.NO_DATA; + result.push(framedMessage); + } + break; + default: + throw new Error('Unexpected read state'); + } + } + return result; + } +} +exports.StreamDecoder = StreamDecoder; +//# sourceMappingURL=stream-decoder.js.map + +/***/ }), + +/***/ 78021: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2021 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointMap = exports.endpointHasAddress = exports.endpointToString = exports.endpointEqual = exports.stringToSubchannelAddress = exports.subchannelAddressToString = exports.subchannelAddressEqual = exports.isTcpSubchannelAddress = void 0; +const net_1 = __nccwpck_require__(41808); +function isTcpSubchannelAddress(address) { + return 'port' in address; +} +exports.isTcpSubchannelAddress = isTcpSubchannelAddress; +function subchannelAddressEqual(address1, address2) { + if (!address1 && !address2) { + return true; + } + if (!address1 || !address2) { + return false; + } + if (isTcpSubchannelAddress(address1)) { + return (isTcpSubchannelAddress(address2) && + address1.host === address2.host && + address1.port === address2.port); + } + else { + return !isTcpSubchannelAddress(address2) && address1.path === address2.path; + } +} +exports.subchannelAddressEqual = subchannelAddressEqual; +function subchannelAddressToString(address) { + if (isTcpSubchannelAddress(address)) { + if ((0, net_1.isIPv6)(address.host)) { + return '[' + address.host + ']:' + address.port; + } + else { + return address.host + ':' + address.port; + } + } + else { + return address.path; + } +} +exports.subchannelAddressToString = subchannelAddressToString; +const DEFAULT_PORT = 443; +function stringToSubchannelAddress(addressString, port) { + if ((0, net_1.isIP)(addressString)) { + return { + host: addressString, + port: port !== null && port !== void 0 ? port : DEFAULT_PORT, + }; + } + else { + return { + path: addressString, + }; + } +} +exports.stringToSubchannelAddress = stringToSubchannelAddress; +function endpointEqual(endpoint1, endpoint2) { + if (endpoint1.addresses.length !== endpoint2.addresses.length) { + return false; + } + for (let i = 0; i < endpoint1.addresses.length; i++) { + if (!subchannelAddressEqual(endpoint1.addresses[i], endpoint2.addresses[i])) { + return false; + } + } + return true; +} +exports.endpointEqual = endpointEqual; +function endpointToString(endpoint) { + return ('[' + endpoint.addresses.map(subchannelAddressToString).join(', ') + ']'); +} +exports.endpointToString = endpointToString; +function endpointHasAddress(endpoint, expectedAddress) { + for (const address of endpoint.addresses) { + if (subchannelAddressEqual(address, expectedAddress)) { + return true; + } + } + return false; +} +exports.endpointHasAddress = endpointHasAddress; +function endpointEqualUnordered(endpoint1, endpoint2) { + if (endpoint1.addresses.length !== endpoint2.addresses.length) { + return false; + } + for (const address1 of endpoint1.addresses) { + let matchFound = false; + for (const address2 of endpoint2.addresses) { + if (subchannelAddressEqual(address1, address2)) { + matchFound = true; + break; + } + } + if (!matchFound) { + return false; + } + } + return true; +} +class EndpointMap { + constructor() { + this.map = new Set(); + } + get size() { + return this.map.size; + } + getForSubchannelAddress(address) { + for (const entry of this.map) { + if (endpointHasAddress(entry.key, address)) { + return entry.value; + } + } + return undefined; + } + /** + * Delete any entries in this map with keys that are not in endpoints + * @param endpoints + */ + deleteMissing(endpoints) { + const removedValues = []; + for (const entry of this.map) { + let foundEntry = false; + for (const endpoint of endpoints) { + if 
(endpointEqualUnordered(endpoint, entry.key)) { + foundEntry = true; + } + } + if (!foundEntry) { + removedValues.push(entry.value); + this.map.delete(entry); + } + } + return removedValues; + } + get(endpoint) { + for (const entry of this.map) { + if (endpointEqualUnordered(endpoint, entry.key)) { + return entry.value; + } + } + return undefined; + } + set(endpoint, mapEntry) { + for (const entry of this.map) { + if (endpointEqualUnordered(endpoint, entry.key)) { + entry.value = mapEntry; + return; + } + } + this.map.add({ key: endpoint, value: mapEntry }); + } + delete(endpoint) { + for (const entry of this.map) { + if (endpointEqualUnordered(endpoint, entry.key)) { + this.map.delete(entry); + return; + } + } + } + has(endpoint) { + for (const entry of this.map) { + if (endpointEqualUnordered(endpoint, entry.key)) { + return true; + } + } + return false; + } + clear() { + this.map.clear(); + } + *keys() { + for (const entry of this.map) { + yield entry.key; + } + } + *values() { + for (const entry of this.map) { + yield entry.value; + } + } + *entries() { + for (const entry of this.map) { + yield [entry.key, entry.value]; + } + } +} +exports.EndpointMap = EndpointMap; +//# sourceMappingURL=subchannel-address.js.map + +/***/ }), + +/***/ 86940: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Http2SubchannelCall = void 0; +const http2 = __nccwpck_require__(85158); +const os = __nccwpck_require__(22037); +const constants_1 = __nccwpck_require__(90634); +const metadata_1 = __nccwpck_require__(83665); +const stream_decoder_1 = __nccwpck_require__(16575); +const logging = __nccwpck_require__(35993); +const constants_2 = __nccwpck_require__(90634); +const TRACER_NAME = 'subchannel_call'; +/** + * Should do approximately the same thing as util.getSystemErrorName but the + * TypeScript types don't have that function for some reason so I just made my + * own. 
+ * @param errno + */ +function getSystemErrorName(errno) { + for (const [name, num] of Object.entries(os.constants.errno)) { + if (num === errno) { + return name; + } + } + return 'Unknown system error ' + errno; +} +function mapHttpStatusCode(code) { + const details = `Received HTTP status code ${code}`; + let mappedStatusCode; + switch (code) { + // TODO(murgatroid99): handle 100 and 101 + case 400: + mappedStatusCode = constants_1.Status.INTERNAL; + break; + case 401: + mappedStatusCode = constants_1.Status.UNAUTHENTICATED; + break; + case 403: + mappedStatusCode = constants_1.Status.PERMISSION_DENIED; + break; + case 404: + mappedStatusCode = constants_1.Status.UNIMPLEMENTED; + break; + case 429: + case 502: + case 503: + case 504: + mappedStatusCode = constants_1.Status.UNAVAILABLE; + break; + default: + mappedStatusCode = constants_1.Status.UNKNOWN; + } + return { + code: mappedStatusCode, + details: details, + metadata: new metadata_1.Metadata() + }; +} +class Http2SubchannelCall { + constructor(http2Stream, callEventTracker, listener, transport, callId) { + var _a; + this.http2Stream = http2Stream; + this.callEventTracker = callEventTracker; + this.listener = listener; + this.transport = transport; + this.callId = callId; + this.isReadFilterPending = false; + this.isPushPending = false; + this.canPush = false; + /** + * Indicates that an 'end' event has come from the http2 stream, so there + * will be no more data events. + */ + this.readsClosed = false; + this.statusOutput = false; + this.unpushedReadMessages = []; + // This is populated (non-null) if and only if the call has ended + this.finalStatus = null; + this.internalError = null; + this.serverEndedCall = false; + const maxReceiveMessageLength = (_a = transport.getOptions()['grpc.max_receive_message_length']) !== null && _a !== void 0 ? _a : constants_1.DEFAULT_MAX_RECEIVE_MESSAGE_LENGTH; + this.decoder = new stream_decoder_1.StreamDecoder(maxReceiveMessageLength); + http2Stream.on('response', (headers, flags) => { + let headersString = ''; + for (const header of Object.keys(headers)) { + headersString += '\t\t' + header + ': ' + headers[header] + '\n'; + } + this.trace('Received server headers:\n' + headersString); + this.httpStatusCode = headers[':status']; + if (flags & http2.constants.NGHTTP2_FLAG_END_STREAM) { + this.handleTrailers(headers); + } + else { + let metadata; + try { + metadata = metadata_1.Metadata.fromHttp2Headers(headers); + } + catch (error) { + this.endCall({ + code: constants_1.Status.UNKNOWN, + details: error.message, + metadata: new metadata_1.Metadata(), + }); + return; + } + this.listener.onReceiveMetadata(metadata); + } + }); + http2Stream.on('trailers', (headers) => { + this.handleTrailers(headers); + }); + http2Stream.on('data', (data) => { + /* If the status has already been output, allow the http2 stream to + * drain without processing the data. 
*/ + if (this.statusOutput) { + return; + } + this.trace('receive HTTP/2 data frame of length ' + data.length); + let messages; + try { + messages = this.decoder.write(data); + } + catch (e) { + this.cancelWithStatus(constants_1.Status.RESOURCE_EXHAUSTED, e.message); + return; + } + for (const message of messages) { + this.trace('parsed message of length ' + message.length); + this.callEventTracker.addMessageReceived(); + this.tryPush(message); + } + }); + http2Stream.on('end', () => { + this.readsClosed = true; + this.maybeOutputStatus(); + }); + http2Stream.on('close', () => { + this.serverEndedCall = true; + /* Use process.next tick to ensure that this code happens after any + * "error" event that may be emitted at about the same time, so that + * we can bubble up the error message from that event. */ + process.nextTick(() => { + var _a; + this.trace('HTTP/2 stream closed with code ' + http2Stream.rstCode); + /* If we have a final status with an OK status code, that means that + * we have received all of the messages and we have processed the + * trailers and the call completed successfully, so it doesn't matter + * how the stream ends after that */ + if (((_a = this.finalStatus) === null || _a === void 0 ? void 0 : _a.code) === constants_1.Status.OK) { + return; + } + let code; + let details = ''; + switch (http2Stream.rstCode) { + case http2.constants.NGHTTP2_NO_ERROR: + /* If we get a NO_ERROR code and we already have a status, the + * stream completed properly and we just haven't fully processed + * it yet */ + if (this.finalStatus !== null) { + return; + } + if (this.httpStatusCode && this.httpStatusCode !== 200) { + const mappedStatus = mapHttpStatusCode(this.httpStatusCode); + code = mappedStatus.code; + details = mappedStatus.details; + } + else { + code = constants_1.Status.INTERNAL; + details = `Received RST_STREAM with code ${http2Stream.rstCode} (Call ended without gRPC status)`; + } + break; + case http2.constants.NGHTTP2_REFUSED_STREAM: + code = constants_1.Status.UNAVAILABLE; + details = 'Stream refused by server'; + break; + case http2.constants.NGHTTP2_CANCEL: + code = constants_1.Status.CANCELLED; + details = 'Call cancelled'; + break; + case http2.constants.NGHTTP2_ENHANCE_YOUR_CALM: + code = constants_1.Status.RESOURCE_EXHAUSTED; + details = 'Bandwidth exhausted or memory limit exceeded'; + break; + case http2.constants.NGHTTP2_INADEQUATE_SECURITY: + code = constants_1.Status.PERMISSION_DENIED; + details = 'Protocol not secure enough'; + break; + case http2.constants.NGHTTP2_INTERNAL_ERROR: + code = constants_1.Status.INTERNAL; + if (this.internalError === null) { + /* This error code was previously handled in the default case, and + * there are several instances of it online, so I wanted to + * preserve the original error message so that people find existing + * information in searches, but also include the more recognizable + * "Internal server error" message. */ + details = `Received RST_STREAM with code ${http2Stream.rstCode} (Internal server error)`; + } + else { + if (this.internalError.code === 'ECONNRESET' || + this.internalError.code === 'ETIMEDOUT') { + code = constants_1.Status.UNAVAILABLE; + details = this.internalError.message; + } + else { + /* The "Received RST_STREAM with code ..." error is preserved + * here for continuity with errors reported online, but the + * error message at the end will probably be more relevant in + * most cases. 
*/ + details = `Received RST_STREAM with code ${http2Stream.rstCode} triggered by internal client error: ${this.internalError.message}`; + } + } + break; + default: + code = constants_1.Status.INTERNAL; + details = `Received RST_STREAM with code ${http2Stream.rstCode}`; + } + // This is a no-op if trailers were received at all. + // This is OK, because status codes emitted here correspond to more + // catastrophic issues that prevent us from receiving trailers in the + // first place. + this.endCall({ + code, + details, + metadata: new metadata_1.Metadata(), + rstCode: http2Stream.rstCode, + }); + }); + }); + http2Stream.on('error', (err) => { + /* We need an error handler here to stop "Uncaught Error" exceptions + * from bubbling up. However, errors here should all correspond to + * "close" events, where we will handle the error more granularly */ + /* Specifically looking for stream errors that were *not* constructed + * from a RST_STREAM response here: + * https://github.com/nodejs/node/blob/8b8620d580314050175983402dfddf2674e8e22a/lib/internal/http2/core.js#L2267 + */ + if (err.code !== 'ERR_HTTP2_STREAM_ERROR') { + this.trace('Node error event: message=' + + err.message + + ' code=' + + err.code + + ' errno=' + + getSystemErrorName(err.errno) + + ' syscall=' + + err.syscall); + this.internalError = err; + } + this.callEventTracker.onStreamEnd(false); + }); + } + getDeadlineInfo() { + return [`remote_addr=${this.getPeer()}`]; + } + onDisconnect() { + this.endCall({ + code: constants_1.Status.UNAVAILABLE, + details: 'Connection dropped', + metadata: new metadata_1.Metadata(), + }); + } + outputStatus() { + /* Precondition: this.finalStatus !== null */ + if (!this.statusOutput) { + this.statusOutput = true; + this.trace('ended with status: code=' + + this.finalStatus.code + + ' details="' + + this.finalStatus.details + + '"'); + this.callEventTracker.onCallEnd(this.finalStatus); + /* We delay the actual action of bubbling up the status to insulate the + * cleanup code in this class from any errors that may be thrown in the + * upper layers as a result of bubbling up the status. In particular, + * if the status is not OK, the "error" event may be emitted + * synchronously at the top level, which will result in a thrown error if + * the user does not handle that event. */ + process.nextTick(() => { + this.listener.onReceiveStatus(this.finalStatus); + }); + /* Leave the http2 stream in flowing state to drain incoming messages, to + * ensure that the stream closure completes. The call stream already does + * not push more messages after the status is output, so the messages go + * nowhere either way. */ + this.http2Stream.resume(); + } + } + trace(text) { + logging.trace(constants_2.LogVerbosity.DEBUG, TRACER_NAME, '[' + this.callId + '] ' + text); + } + /** + * On first call, emits a 'status' event with the given StatusObject. + * Subsequent calls are no-ops. + * @param status The status of the call. + */ + endCall(status) { + /* If the status is OK and a new status comes in (e.g. 
from a + * deserialization failure), that new status takes priority */ + if (this.finalStatus === null || this.finalStatus.code === constants_1.Status.OK) { + this.finalStatus = status; + this.maybeOutputStatus(); + } + this.destroyHttp2Stream(); + } + maybeOutputStatus() { + if (this.finalStatus !== null) { + /* The combination check of readsClosed and that the two message buffer + * arrays are empty checks that there all incoming data has been fully + * processed */ + if (this.finalStatus.code !== constants_1.Status.OK || + (this.readsClosed && + this.unpushedReadMessages.length === 0 && + !this.isReadFilterPending && + !this.isPushPending)) { + this.outputStatus(); + } + } + } + push(message) { + this.trace('pushing to reader message of length ' + + (message instanceof Buffer ? message.length : null)); + this.canPush = false; + this.isPushPending = true; + process.nextTick(() => { + this.isPushPending = false; + /* If we have already output the status any later messages should be + * ignored, and can cause out-of-order operation errors higher up in the + * stack. Checking as late as possible here to avoid any race conditions. + */ + if (this.statusOutput) { + return; + } + this.listener.onReceiveMessage(message); + this.maybeOutputStatus(); + }); + } + tryPush(messageBytes) { + if (this.canPush) { + this.http2Stream.pause(); + this.push(messageBytes); + } + else { + this.trace('unpushedReadMessages.push message of length ' + messageBytes.length); + this.unpushedReadMessages.push(messageBytes); + } + } + handleTrailers(headers) { + this.serverEndedCall = true; + this.callEventTracker.onStreamEnd(true); + let headersString = ''; + for (const header of Object.keys(headers)) { + headersString += '\t\t' + header + ': ' + headers[header] + '\n'; + } + this.trace('Received server trailers:\n' + headersString); + let metadata; + try { + metadata = metadata_1.Metadata.fromHttp2Headers(headers); + } + catch (e) { + metadata = new metadata_1.Metadata(); + } + const metadataMap = metadata.getMap(); + let status; + if (typeof metadataMap['grpc-status'] === 'string') { + const receivedStatus = Number(metadataMap['grpc-status']); + this.trace('received status code ' + receivedStatus + ' from server'); + metadata.remove('grpc-status'); + let details = ''; + if (typeof metadataMap['grpc-message'] === 'string') { + try { + details = decodeURI(metadataMap['grpc-message']); + } + catch (e) { + details = metadataMap['grpc-message']; + } + metadata.remove('grpc-message'); + this.trace('received status details string "' + details + '" from server'); + } + status = { + code: receivedStatus, + details: details, + metadata: metadata + }; + } + else if (this.httpStatusCode) { + status = mapHttpStatusCode(this.httpStatusCode); + status.metadata = metadata; + } + else { + status = { + code: constants_1.Status.UNKNOWN, + details: 'No status information received', + metadata: metadata + }; + } + // This is a no-op if the call was already ended when handling headers. + this.endCall(status); + } + destroyHttp2Stream() { + var _a; + // The http2 stream could already have been destroyed if cancelWithStatus + // is called in response to an internal http2 error. + if (this.http2Stream.destroyed) { + return; + } + /* If the server ended the call, sending an RST_STREAM is redundant, so we + * just half close on the client side instead to finish closing the stream. 
+ */ + if (this.serverEndedCall) { + this.http2Stream.end(); + } + else { + /* If the call has ended with an OK status, communicate that when closing + * the stream, partly to avoid a situation in which we detect an error + * RST_STREAM as a result after we have the status */ + let code; + if (((_a = this.finalStatus) === null || _a === void 0 ? void 0 : _a.code) === constants_1.Status.OK) { + code = http2.constants.NGHTTP2_NO_ERROR; + } + else { + code = http2.constants.NGHTTP2_CANCEL; + } + this.trace('close http2 stream with code ' + code); + this.http2Stream.close(code); + } + } + cancelWithStatus(status, details) { + this.trace('cancelWithStatus code: ' + status + ' details: "' + details + '"'); + this.endCall({ code: status, details, metadata: new metadata_1.Metadata() }); + } + getStatus() { + return this.finalStatus; + } + getPeer() { + return this.transport.getPeerName(); + } + getCallNumber() { + return this.callId; + } + startRead() { + /* If the stream has ended with an error, we should not emit any more + * messages and we should communicate that the stream has ended */ + if (this.finalStatus !== null && this.finalStatus.code !== constants_1.Status.OK) { + this.readsClosed = true; + this.maybeOutputStatus(); + return; + } + this.canPush = true; + if (this.unpushedReadMessages.length > 0) { + const nextMessage = this.unpushedReadMessages.shift(); + this.push(nextMessage); + return; + } + /* Only resume reading from the http2Stream if we don't have any pending + * messages to emit */ + this.http2Stream.resume(); + } + sendMessageWithContext(context, message) { + this.trace('write() called with message of length ' + message.length); + const cb = (error) => { + /* nextTick here ensures that no stream action can be taken in the call + * stack of the write callback, in order to hopefully work around + * https://github.com/nodejs/node/issues/49147 */ + process.nextTick(() => { + var _a; + let code = constants_1.Status.UNAVAILABLE; + if ((error === null || error === void 0 ? void 0 : error.code) === + 'ERR_STREAM_WRITE_AFTER_END') { + code = constants_1.Status.INTERNAL; + } + if (error) { + this.cancelWithStatus(code, `Write error: ${error.message}`); + } + (_a = context.callback) === null || _a === void 0 ? void 0 : _a.call(context); + }); + }; + this.trace('sending data chunk of length ' + message.length); + this.callEventTracker.addMessageSent(); + try { + this.http2Stream.write(message, cb); + } + catch (error) { + this.endCall({ + code: constants_1.Status.UNAVAILABLE, + details: `Write failed with error ${error.message}`, + metadata: new metadata_1.Metadata(), + }); + } + } + halfClose() { + this.trace('end() called'); + this.trace('calling end() on HTTP/2 stream'); + this.http2Stream.end(); + } +} +exports.Http2SubchannelCall = Http2SubchannelCall; +//# sourceMappingURL=subchannel-call.js.map + +/***/ }), + +/***/ 12258: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2022 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseSubchannelWrapper = void 0; +class BaseSubchannelWrapper { + constructor(child) { + this.child = child; + this.healthy = true; + this.healthListeners = new Set(); + child.addHealthStateWatcher(childHealthy => { + /* A change to the child health state only affects this wrapper's overall + * health state if this wrapper is reporting healthy. */ + if (this.healthy) { + this.updateHealthListeners(); + } + }); + } + updateHealthListeners() { + for (const listener of this.healthListeners) { + listener(this.isHealthy()); + } + } + getConnectivityState() { + return this.child.getConnectivityState(); + } + addConnectivityStateListener(listener) { + this.child.addConnectivityStateListener(listener); + } + removeConnectivityStateListener(listener) { + this.child.removeConnectivityStateListener(listener); + } + startConnecting() { + this.child.startConnecting(); + } + getAddress() { + return this.child.getAddress(); + } + throttleKeepalive(newKeepaliveTime) { + this.child.throttleKeepalive(newKeepaliveTime); + } + ref() { + this.child.ref(); + } + unref() { + this.child.unref(); + } + getChannelzRef() { + return this.child.getChannelzRef(); + } + isHealthy() { + return this.healthy && this.child.isHealthy(); + } + addHealthStateWatcher(listener) { + this.healthListeners.add(listener); + } + removeHealthStateWatcher(listener) { + this.healthListeners.delete(listener); + } + setHealthy(healthy) { + if (healthy !== this.healthy) { + this.healthy = healthy; + /* A change to this wrapper's health state only affects the overall + * reported health state if the child is healthy. */ + if (this.child.isHealthy()) { + this.updateHealthListeners(); + } + } + } + getRealSubchannel() { + return this.child.getRealSubchannel(); + } + realSubchannelEquals(other) { + return this.getRealSubchannel() === other.getRealSubchannel(); + } +} +exports.BaseSubchannelWrapper = BaseSubchannelWrapper; +//# sourceMappingURL=subchannel-interface.js.map + +/***/ }), + +/***/ 39780: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSubchannelPool = exports.SubchannelPool = void 0; +const channel_options_1 = __nccwpck_require__(99810); +const subchannel_1 = __nccwpck_require__(84764); +const subchannel_address_1 = __nccwpck_require__(78021); +const uri_parser_1 = __nccwpck_require__(65974); +const transport_1 = __nccwpck_require__(46690); +// 10 seconds in milliseconds. This value is arbitrary. +/** + * The amount of time in between checks for dropping subchannels that have no + * other references + */ +const REF_CHECK_INTERVAL = 10000; +class SubchannelPool { + /** + * A pool of subchannels use for making connections. Subchannels with the + * exact same parameters will be reused. 
+ */ + constructor() { + this.pool = Object.create(null); + /** + * A timer of a task performing a periodic subchannel cleanup. + */ + this.cleanupTimer = null; + } + /** + * Unrefs all unused subchannels and cancels the cleanup task if all + * subchannels have been unrefed. + */ + unrefUnusedSubchannels() { + let allSubchannelsUnrefed = true; + /* These objects are created with Object.create(null), so they do not + * have a prototype, which means that for (... in ...) loops over them + * do not need to be filtered */ + // eslint-disable-disable-next-line:forin + for (const channelTarget in this.pool) { + const subchannelObjArray = this.pool[channelTarget]; + const refedSubchannels = subchannelObjArray.filter(value => !value.subchannel.unrefIfOneRef()); + if (refedSubchannels.length > 0) { + allSubchannelsUnrefed = false; + } + /* For each subchannel in the pool, try to unref it if it has + * exactly one ref (which is the ref from the pool itself). If that + * does happen, remove the subchannel from the pool */ + this.pool[channelTarget] = refedSubchannels; + } + /* Currently we do not delete keys with empty values. If that results + * in significant memory usage we should change it. */ + // Cancel the cleanup task if all subchannels have been unrefed. + if (allSubchannelsUnrefed && this.cleanupTimer !== null) { + clearInterval(this.cleanupTimer); + this.cleanupTimer = null; + } + } + /** + * Ensures that the cleanup task is spawned. + */ + ensureCleanupTask() { + var _a, _b; + if (this.cleanupTimer === null) { + this.cleanupTimer = setInterval(() => { + this.unrefUnusedSubchannels(); + }, REF_CHECK_INTERVAL); + // Unref because this timer should not keep the event loop running. + // Call unref only if it exists to address electron/electron#21162 + (_b = (_a = this.cleanupTimer).unref) === null || _b === void 0 ? void 0 : _b.call(_a); + } + } + /** + * Get a subchannel if one already exists with exactly matching parameters. + * Otherwise, create and save a subchannel with those parameters. + * @param channelTarget + * @param subchannelTarget + * @param channelArguments + * @param channelCredentials + */ + getOrCreateSubchannel(channelTargetUri, subchannelTarget, channelArguments, channelCredentials) { + this.ensureCleanupTask(); + const channelTarget = (0, uri_parser_1.uriToString)(channelTargetUri); + if (channelTarget in this.pool) { + const subchannelObjArray = this.pool[channelTarget]; + for (const subchannelObj of subchannelObjArray) { + if ((0, subchannel_address_1.subchannelAddressEqual)(subchannelTarget, subchannelObj.subchannelAddress) && + (0, channel_options_1.channelOptionsEqual)(channelArguments, subchannelObj.channelArguments) && + channelCredentials._equals(subchannelObj.channelCredentials)) { + return subchannelObj.subchannel; + } + } + } + // If we get here, no matching subchannel was found + const subchannel = new subchannel_1.Subchannel(channelTargetUri, subchannelTarget, channelArguments, channelCredentials, new transport_1.Http2SubchannelConnector(channelTargetUri)); + if (!(channelTarget in this.pool)) { + this.pool[channelTarget] = []; + } + this.pool[channelTarget].push({ + subchannelAddress: subchannelTarget, + channelArguments, + channelCredentials, + subchannel, + }); + subchannel.ref(); + return subchannel; + } +} +exports.SubchannelPool = SubchannelPool; +const globalSubchannelPool = new SubchannelPool(); +/** + * Get either the global subchannel pool, or a new subchannel pool. 
+ * @param global + */ +function getSubchannelPool(global) { + if (global) { + return globalSubchannelPool; + } + else { + return new SubchannelPool(); + } +} +exports.getSubchannelPool = getSubchannelPool; +//# sourceMappingURL=subchannel-pool.js.map + +/***/ }), + +/***/ 84764: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Subchannel = void 0; +const connectivity_state_1 = __nccwpck_require__(80878); +const backoff_timeout_1 = __nccwpck_require__(34186); +const logging = __nccwpck_require__(35993); +const constants_1 = __nccwpck_require__(90634); +const uri_parser_1 = __nccwpck_require__(65974); +const subchannel_address_1 = __nccwpck_require__(78021); +const channelz_1 = __nccwpck_require__(79975); +const TRACER_NAME = 'subchannel'; +/* setInterval and setTimeout only accept signed 32 bit integers. JS doesn't + * have a constant for the max signed 32 bit integer, so this is a simple way + * to calculate it */ +const KEEPALIVE_MAX_TIME_MS = ~(1 << 31); +class Subchannel { + /** + * A class representing a connection to a single backend. + * @param channelTarget The target string for the channel as a whole + * @param subchannelAddress The address for the backend that this subchannel + * will connect to + * @param options The channel options, plus any specific subchannel options + * for this subchannel + * @param credentials The channel credentials used to establish this + * connection + */ + constructor(channelTarget, subchannelAddress, options, credentials, connector) { + var _a; + this.channelTarget = channelTarget; + this.subchannelAddress = subchannelAddress; + this.options = options; + this.credentials = credentials; + this.connector = connector; + /** + * The subchannel's current connectivity state. Invariant: `session` === `null` + * if and only if `connectivityState` is IDLE or TRANSIENT_FAILURE. + */ + this.connectivityState = connectivity_state_1.ConnectivityState.IDLE; + /** + * The underlying http2 session used to make requests. + */ + this.transport = null; + /** + * Indicates that the subchannel should transition from TRANSIENT_FAILURE to + * CONNECTING instead of IDLE when the backoff timeout ends. + */ + this.continueConnecting = false; + /** + * A list of listener functions that will be called whenever the connectivity + * state changes. 
Will be modified by `addConnectivityStateListener` and + * `removeConnectivityStateListener` + */ + this.stateListeners = new Set(); + /** + * Tracks channels and subchannel pools with references to this subchannel + */ + this.refcount = 0; + // Channelz info + this.channelzEnabled = true; + const backoffOptions = { + initialDelay: options['grpc.initial_reconnect_backoff_ms'], + maxDelay: options['grpc.max_reconnect_backoff_ms'], + }; + this.backoffTimeout = new backoff_timeout_1.BackoffTimeout(() => { + this.handleBackoffTimer(); + }, backoffOptions); + this.backoffTimeout.unref(); + this.subchannelAddressString = (0, subchannel_address_1.subchannelAddressToString)(subchannelAddress); + this.keepaliveTime = (_a = options['grpc.keepalive_time_ms']) !== null && _a !== void 0 ? _a : -1; + if (options['grpc.enable_channelz'] === 0) { + this.channelzEnabled = false; + this.channelzTrace = new channelz_1.ChannelzTraceStub(); + this.callTracker = new channelz_1.ChannelzCallTrackerStub(); + this.childrenTracker = new channelz_1.ChannelzChildrenTrackerStub(); + this.streamTracker = new channelz_1.ChannelzCallTrackerStub(); + } + else { + this.channelzTrace = new channelz_1.ChannelzTrace(); + this.callTracker = new channelz_1.ChannelzCallTracker(); + this.childrenTracker = new channelz_1.ChannelzChildrenTracker(); + this.streamTracker = new channelz_1.ChannelzCallTracker(); + } + this.channelzRef = (0, channelz_1.registerChannelzSubchannel)(this.subchannelAddressString, () => this.getChannelzInfo(), this.channelzEnabled); + this.channelzTrace.addTrace('CT_INFO', 'Subchannel created'); + this.trace('Subchannel constructed with options ' + + JSON.stringify(options, undefined, 2)); + credentials._ref(); + } + getChannelzInfo() { + return { + state: this.connectivityState, + trace: this.channelzTrace, + callTracker: this.callTracker, + children: this.childrenTracker.getChildLists(), + target: this.subchannelAddressString, + }; + } + trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, '(' + + this.channelzRef.id + + ') ' + + this.subchannelAddressString + + ' ' + + text); + } + refTrace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, 'subchannel_refcount', '(' + + this.channelzRef.id + + ') ' + + this.subchannelAddressString + + ' ' + + text); + } + handleBackoffTimer() { + if (this.continueConnecting) { + this.transitionToState([connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE], connectivity_state_1.ConnectivityState.CONNECTING); + } + else { + this.transitionToState([connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE], connectivity_state_1.ConnectivityState.IDLE); + } + } + /** + * Start a backoff timer with the current nextBackoff timeout + */ + startBackoff() { + this.backoffTimeout.runOnce(); + } + stopBackoff() { + this.backoffTimeout.stop(); + this.backoffTimeout.reset(); + } + startConnectingInternal() { + let options = this.options; + if (options['grpc.keepalive_time_ms']) { + const adjustedKeepaliveTime = Math.min(this.keepaliveTime, KEEPALIVE_MAX_TIME_MS); + options = Object.assign(Object.assign({}, options), { 'grpc.keepalive_time_ms': adjustedKeepaliveTime }); + } + this.connector + .connect(this.subchannelAddress, this.credentials, options) + .then(transport => { + if (this.transitionToState([connectivity_state_1.ConnectivityState.CONNECTING], connectivity_state_1.ConnectivityState.READY)) { + this.transport = transport; + if (this.channelzEnabled) { + this.childrenTracker.refChild(transport.getChannelzRef()); + } + 
transport.addDisconnectListener(tooManyPings => { + this.transitionToState([connectivity_state_1.ConnectivityState.READY], connectivity_state_1.ConnectivityState.IDLE); + if (tooManyPings && this.keepaliveTime > 0) { + this.keepaliveTime *= 2; + logging.log(constants_1.LogVerbosity.ERROR, `Connection to ${(0, uri_parser_1.uriToString)(this.channelTarget)} at ${this.subchannelAddressString} rejected by server because of excess pings. Increasing ping interval to ${this.keepaliveTime} ms`); + } + }); + } + else { + /* If we can't transition from CONNECTING to READY here, we will + * not be using this transport, so release its resources. */ + transport.shutdown(); + } + }, error => { + this.transitionToState([connectivity_state_1.ConnectivityState.CONNECTING], connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE, `${error}`); + }); + } + /** + * Initiate a state transition from any element of oldStates to the new + * state. If the current connectivityState is not in oldStates, do nothing. + * @param oldStates The set of states to transition from + * @param newState The state to transition to + * @returns True if the state changed, false otherwise + */ + transitionToState(oldStates, newState, errorMessage) { + var _a, _b; + if (oldStates.indexOf(this.connectivityState) === -1) { + return false; + } + if (errorMessage) { + this.trace(connectivity_state_1.ConnectivityState[this.connectivityState] + + ' -> ' + + connectivity_state_1.ConnectivityState[newState] + + ' with error "' + errorMessage + '"'); + } + else { + this.trace(connectivity_state_1.ConnectivityState[this.connectivityState] + + ' -> ' + + connectivity_state_1.ConnectivityState[newState]); + } + if (this.channelzEnabled) { + this.channelzTrace.addTrace('CT_INFO', 'Connectivity state change to ' + connectivity_state_1.ConnectivityState[newState]); + } + const previousState = this.connectivityState; + this.connectivityState = newState; + switch (newState) { + case connectivity_state_1.ConnectivityState.READY: + this.stopBackoff(); + break; + case connectivity_state_1.ConnectivityState.CONNECTING: + this.startBackoff(); + this.startConnectingInternal(); + this.continueConnecting = false; + break; + case connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE: + if (this.channelzEnabled && this.transport) { + this.childrenTracker.unrefChild(this.transport.getChannelzRef()); + } + (_a = this.transport) === null || _a === void 0 ? void 0 : _a.shutdown(); + this.transport = null; + /* If the backoff timer has already ended by the time we get to the + * TRANSIENT_FAILURE state, we want to immediately transition out of + * TRANSIENT_FAILURE as though the backoff timer is ending right now */ + if (!this.backoffTimeout.isRunning()) { + process.nextTick(() => { + this.handleBackoffTimer(); + }); + } + break; + case connectivity_state_1.ConnectivityState.IDLE: + if (this.channelzEnabled && this.transport) { + this.childrenTracker.unrefChild(this.transport.getChannelzRef()); + } + (_b = this.transport) === null || _b === void 0 ? 
void 0 : _b.shutdown(); + this.transport = null; + break; + default: + throw new Error(`Invalid state: unknown ConnectivityState ${newState}`); + } + for (const listener of this.stateListeners) { + listener(this, previousState, newState, this.keepaliveTime, errorMessage); + } + return true; + } + ref() { + this.refTrace('refcount ' + this.refcount + ' -> ' + (this.refcount + 1)); + this.refcount += 1; + } + unref() { + this.refTrace('refcount ' + this.refcount + ' -> ' + (this.refcount - 1)); + this.refcount -= 1; + if (this.refcount === 0) { + this.channelzTrace.addTrace('CT_INFO', 'Shutting down'); + (0, channelz_1.unregisterChannelzRef)(this.channelzRef); + this.credentials._unref(); + process.nextTick(() => { + this.transitionToState([connectivity_state_1.ConnectivityState.CONNECTING, connectivity_state_1.ConnectivityState.READY], connectivity_state_1.ConnectivityState.IDLE); + }); + } + } + unrefIfOneRef() { + if (this.refcount === 1) { + this.unref(); + return true; + } + return false; + } + createCall(metadata, host, method, listener) { + if (!this.transport) { + throw new Error('Cannot create call, subchannel not READY'); + } + let statsTracker; + if (this.channelzEnabled) { + this.callTracker.addCallStarted(); + this.streamTracker.addCallStarted(); + statsTracker = { + onCallEnd: status => { + if (status.code === constants_1.Status.OK) { + this.callTracker.addCallSucceeded(); + } + else { + this.callTracker.addCallFailed(); + } + }, + }; + } + else { + statsTracker = {}; + } + return this.transport.createCall(metadata, host, method, listener, statsTracker); + } + /** + * If the subchannel is currently IDLE, start connecting and switch to the + * CONNECTING state. If the subchannel is current in TRANSIENT_FAILURE, + * the next time it would transition to IDLE, start connecting again instead. + * Otherwise, do nothing. + */ + startConnecting() { + process.nextTick(() => { + /* First, try to transition from IDLE to connecting. If that doesn't happen + * because the state is not currently IDLE, check if it is + * TRANSIENT_FAILURE, and if so indicate that it should go back to + * connecting after the backoff timer ends. Otherwise do nothing */ + if (!this.transitionToState([connectivity_state_1.ConnectivityState.IDLE], connectivity_state_1.ConnectivityState.CONNECTING)) { + if (this.connectivityState === connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE) { + this.continueConnecting = true; + } + } + }); + } + /** + * Get the subchannel's current connectivity state. + */ + getConnectivityState() { + return this.connectivityState; + } + /** + * Add a listener function to be called whenever the subchannel's + * connectivity state changes. + * @param listener + */ + addConnectivityStateListener(listener) { + this.stateListeners.add(listener); + } + /** + * Remove a listener previously added with `addConnectivityStateListener` + * @param listener A reference to a function previously passed to + * `addConnectivityStateListener` + */ + removeConnectivityStateListener(listener) { + this.stateListeners.delete(listener); + } + /** + * Reset the backoff timeout, and immediately start connecting if in backoff. 
+ */ + resetBackoff() { + process.nextTick(() => { + this.backoffTimeout.reset(); + this.transitionToState([connectivity_state_1.ConnectivityState.TRANSIENT_FAILURE], connectivity_state_1.ConnectivityState.CONNECTING); + }); + } + getAddress() { + return this.subchannelAddressString; + } + getChannelzRef() { + return this.channelzRef; + } + isHealthy() { + return true; + } + addHealthStateWatcher(listener) { + // Do nothing with the listener + } + removeHealthStateWatcher(listener) { + // Do nothing with the listener + } + getRealSubchannel() { + return this; + } + realSubchannelEquals(other) { + return other.getRealSubchannel() === this; + } + throttleKeepalive(newKeepaliveTime) { + if (newKeepaliveTime > this.keepaliveTime) { + this.keepaliveTime = newKeepaliveTime; + } + } +} +exports.Subchannel = Subchannel; +//# sourceMappingURL=subchannel.js.map + +/***/ }), + +/***/ 86581: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDefaultRootsData = exports.CIPHER_SUITES = void 0; +const fs = __nccwpck_require__(57147); +exports.CIPHER_SUITES = process.env.GRPC_SSL_CIPHER_SUITES; +const DEFAULT_ROOTS_FILE_PATH = process.env.GRPC_DEFAULT_SSL_ROOTS_FILE_PATH; +let defaultRootsData = null; +function getDefaultRootsData() { + if (DEFAULT_ROOTS_FILE_PATH) { + if (defaultRootsData === null) { + defaultRootsData = fs.readFileSync(DEFAULT_ROOTS_FILE_PATH); + } + return defaultRootsData; + } + return null; +} +exports.getDefaultRootsData = getDefaultRootsData; +//# sourceMappingURL=tls-helpers.js.map + +/***/ }), + +/***/ 46690: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright 2023 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Http2SubchannelConnector = void 0; +const http2 = __nccwpck_require__(85158); +const tls_1 = __nccwpck_require__(24404); +const channelz_1 = __nccwpck_require__(79975); +const constants_1 = __nccwpck_require__(90634); +const http_proxy_1 = __nccwpck_require__(24000); +const logging = __nccwpck_require__(35993); +const resolver_1 = __nccwpck_require__(31594); +const subchannel_address_1 = __nccwpck_require__(78021); +const uri_parser_1 = __nccwpck_require__(65974); +const net = __nccwpck_require__(41808); +const subchannel_call_1 = __nccwpck_require__(86940); +const call_number_1 = __nccwpck_require__(70380); +const TRACER_NAME = 'transport'; +const FLOW_CONTROL_TRACER_NAME = 'transport_flowctrl'; +const clientVersion = (__nccwpck_require__(56569)/* .version */ .i8); +const { HTTP2_HEADER_AUTHORITY, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_METHOD, HTTP2_HEADER_PATH, HTTP2_HEADER_TE, HTTP2_HEADER_USER_AGENT, } = http2.constants; +const KEEPALIVE_TIMEOUT_MS = 20000; +const tooManyPingsData = Buffer.from('too_many_pings', 'ascii'); +class Http2Transport { + constructor(session, subchannelAddress, options, + /** + * Name of the remote server, if it is not the same as the subchannel + * address, i.e. if connecting through an HTTP CONNECT proxy. + */ + remoteName) { + this.session = session; + this.options = options; + this.remoteName = remoteName; + /** + * Timer reference indicating when to send the next ping or when the most recent ping will be considered lost. + */ + this.keepaliveTimer = null; + /** + * Indicates that the keepalive timer ran out while there were no active + * calls, and a ping should be sent the next time a call starts. + */ + this.pendingSendKeepalivePing = false; + this.activeCalls = new Set(); + this.disconnectListeners = []; + this.disconnectHandled = false; + this.channelzEnabled = true; + this.keepalivesSent = 0; + this.messagesSent = 0; + this.messagesReceived = 0; + this.lastMessageSentTimestamp = null; + this.lastMessageReceivedTimestamp = null; + /* Populate subchannelAddressString and channelzRef before doing anything + * else, because they are used in the trace methods. */ + this.subchannelAddressString = (0, subchannel_address_1.subchannelAddressToString)(subchannelAddress); + if (options['grpc.enable_channelz'] === 0) { + this.channelzEnabled = false; + this.streamTracker = new channelz_1.ChannelzCallTrackerStub(); + } + else { + this.streamTracker = new channelz_1.ChannelzCallTracker(); + } + this.channelzRef = (0, channelz_1.registerChannelzSocket)(this.subchannelAddressString, () => this.getChannelzInfo(), this.channelzEnabled); + // Build user-agent string. 
+ this.userAgent = [ + options['grpc.primary_user_agent'], + `grpc-node-js/${clientVersion}`, + options['grpc.secondary_user_agent'], + ] + .filter(e => e) + .join(' '); // remove falsey values first + if ('grpc.keepalive_time_ms' in options) { + this.keepaliveTimeMs = options['grpc.keepalive_time_ms']; + } + else { + this.keepaliveTimeMs = -1; + } + if ('grpc.keepalive_timeout_ms' in options) { + this.keepaliveTimeoutMs = options['grpc.keepalive_timeout_ms']; + } + else { + this.keepaliveTimeoutMs = KEEPALIVE_TIMEOUT_MS; + } + if ('grpc.keepalive_permit_without_calls' in options) { + this.keepaliveWithoutCalls = + options['grpc.keepalive_permit_without_calls'] === 1; + } + else { + this.keepaliveWithoutCalls = false; + } + session.once('close', () => { + this.trace('session closed'); + this.handleDisconnect(); + }); + session.once('goaway', (errorCode, lastStreamID, opaqueData) => { + let tooManyPings = false; + /* See the last paragraph of + * https://github.com/grpc/proposal/blob/master/A8-client-side-keepalive.md#basic-keepalive */ + if (errorCode === http2.constants.NGHTTP2_ENHANCE_YOUR_CALM && + opaqueData && + opaqueData.equals(tooManyPingsData)) { + tooManyPings = true; + } + this.trace('connection closed by GOAWAY with code ' + + errorCode + + ' and data ' + + (opaqueData === null || opaqueData === void 0 ? void 0 : opaqueData.toString())); + this.reportDisconnectToOwner(tooManyPings); + }); + session.once('error', error => { + this.trace('connection closed with error ' + error.message); + this.handleDisconnect(); + }); + if (logging.isTracerEnabled(TRACER_NAME)) { + session.on('remoteSettings', (settings) => { + this.trace('new settings received' + + (this.session !== session ? ' on the old connection' : '') + + ': ' + + JSON.stringify(settings)); + }); + session.on('localSettings', (settings) => { + this.trace('local settings acknowledged by remote' + + (this.session !== session ? ' on the old connection' : '') + + ': ' + + JSON.stringify(settings)); + }); + } + /* Start the keepalive timer last, because this can trigger trace logs, + * which should only happen after everything else is set up. */ + if (this.keepaliveWithoutCalls) { + this.maybeStartKeepalivePingTimer(); + } + } + getChannelzInfo() { + var _a, _b, _c; + const sessionSocket = this.session.socket; + const remoteAddress = sessionSocket.remoteAddress + ? (0, subchannel_address_1.stringToSubchannelAddress)(sessionSocket.remoteAddress, sessionSocket.remotePort) + : null; + const localAddress = sessionSocket.localAddress + ? (0, subchannel_address_1.stringToSubchannelAddress)(sessionSocket.localAddress, sessionSocket.localPort) + : null; + let tlsInfo; + if (this.session.encrypted) { + const tlsSocket = sessionSocket; + const cipherInfo = tlsSocket.getCipher(); + const certificate = tlsSocket.getCertificate(); + const peerCertificate = tlsSocket.getPeerCertificate(); + tlsInfo = { + cipherSuiteStandardName: (_a = cipherInfo.standardName) !== null && _a !== void 0 ? _a : null, + cipherSuiteOtherName: cipherInfo.standardName ? null : cipherInfo.name, + localCertificate: certificate && 'raw' in certificate ? certificate.raw : null, + remoteCertificate: peerCertificate && 'raw' in peerCertificate + ? 
peerCertificate.raw + : null, + }; + } + else { + tlsInfo = null; + } + const socketInfo = { + remoteAddress: remoteAddress, + localAddress: localAddress, + security: tlsInfo, + remoteName: this.remoteName, + streamsStarted: this.streamTracker.callsStarted, + streamsSucceeded: this.streamTracker.callsSucceeded, + streamsFailed: this.streamTracker.callsFailed, + messagesSent: this.messagesSent, + messagesReceived: this.messagesReceived, + keepAlivesSent: this.keepalivesSent, + lastLocalStreamCreatedTimestamp: this.streamTracker.lastCallStartedTimestamp, + lastRemoteStreamCreatedTimestamp: null, + lastMessageSentTimestamp: this.lastMessageSentTimestamp, + lastMessageReceivedTimestamp: this.lastMessageReceivedTimestamp, + localFlowControlWindow: (_b = this.session.state.localWindowSize) !== null && _b !== void 0 ? _b : null, + remoteFlowControlWindow: (_c = this.session.state.remoteWindowSize) !== null && _c !== void 0 ? _c : null, + }; + return socketInfo; + } + trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, '(' + + this.channelzRef.id + + ') ' + + this.subchannelAddressString + + ' ' + + text); + } + keepaliveTrace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, 'keepalive', '(' + + this.channelzRef.id + + ') ' + + this.subchannelAddressString + + ' ' + + text); + } + flowControlTrace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, FLOW_CONTROL_TRACER_NAME, '(' + + this.channelzRef.id + + ') ' + + this.subchannelAddressString + + ' ' + + text); + } + internalsTrace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, 'transport_internals', '(' + + this.channelzRef.id + + ') ' + + this.subchannelAddressString + + ' ' + + text); + } + /** + * Indicate to the owner of this object that this transport should no longer + * be used. That happens if the connection drops, or if the server sends a + * GOAWAY. + * @param tooManyPings If true, this was triggered by a GOAWAY with data + * indicating that the session was closed becaues the client sent too many + * pings. + * @returns + */ + reportDisconnectToOwner(tooManyPings) { + if (this.disconnectHandled) { + return; + } + this.disconnectHandled = true; + this.disconnectListeners.forEach(listener => listener(tooManyPings)); + } + /** + * Handle connection drops, but not GOAWAYs. + */ + handleDisconnect() { + if (this.disconnectHandled) { + return; + } + this.clearKeepaliveTimeout(); + this.reportDisconnectToOwner(false); + /* Give calls an event loop cycle to finish naturally before reporting the + * disconnnection to them. */ + setImmediate(() => { + for (const call of this.activeCalls) { + call.onDisconnect(); + } + this.session.destroy(); + }); + } + addDisconnectListener(listener) { + this.disconnectListeners.push(listener); + } + canSendPing() { + return (!this.session.destroyed && + this.keepaliveTimeMs > 0 && + (this.keepaliveWithoutCalls || this.activeCalls.size > 0)); + } + maybeSendPing() { + var _a, _b; + if (!this.canSendPing()) { + this.pendingSendKeepalivePing = true; + return; + } + if (this.keepaliveTimer) { + console.error('keepaliveTimeout is not null'); + return; + } + if (this.channelzEnabled) { + this.keepalivesSent += 1; + } + this.keepaliveTrace('Sending ping with timeout ' + this.keepaliveTimeoutMs + 'ms'); + this.keepaliveTimer = setTimeout(() => { + this.keepaliveTimer = null; + this.keepaliveTrace('Ping timeout passed without response'); + this.handleDisconnect(); + }, this.keepaliveTimeoutMs); + (_b = (_a = this.keepaliveTimer).unref) === null || _b === void 0 ? 
void 0 : _b.call(_a); + let pingSendError = ''; + try { + const pingSentSuccessfully = this.session.ping((err, duration, payload) => { + this.clearKeepaliveTimeout(); + if (err) { + this.keepaliveTrace('Ping failed with error ' + err.message); + this.handleDisconnect(); + } + else { + this.keepaliveTrace('Received ping response'); + this.maybeStartKeepalivePingTimer(); + } + }); + if (!pingSentSuccessfully) { + pingSendError = 'Ping returned false'; + } + } + catch (e) { + // grpc/grpc-node#2139 + pingSendError = (e instanceof Error ? e.message : '') || 'Unknown error'; + } + if (pingSendError) { + this.keepaliveTrace('Ping send failed: ' + pingSendError); + this.handleDisconnect(); + } + } + /** + * Starts the keepalive ping timer if appropriate. If the timer already ran + * out while there were no active requests, instead send a ping immediately. + * If the ping timer is already running or a ping is currently in flight, + * instead do nothing and wait for them to resolve. + */ + maybeStartKeepalivePingTimer() { + var _a, _b; + if (!this.canSendPing()) { + return; + } + if (this.pendingSendKeepalivePing) { + this.pendingSendKeepalivePing = false; + this.maybeSendPing(); + } + else if (!this.keepaliveTimer) { + this.keepaliveTrace('Starting keepalive timer for ' + this.keepaliveTimeMs + 'ms'); + this.keepaliveTimer = setTimeout(() => { + this.keepaliveTimer = null; + this.maybeSendPing(); + }, this.keepaliveTimeMs); + (_b = (_a = this.keepaliveTimer).unref) === null || _b === void 0 ? void 0 : _b.call(_a); + } + /* Otherwise, there is already either a keepalive timer or a ping pending, + * wait for those to resolve. */ + } + /** + * Clears whichever keepalive timeout is currently active, if any. + */ + clearKeepaliveTimeout() { + if (this.keepaliveTimer) { + clearTimeout(this.keepaliveTimer); + this.keepaliveTimer = null; + } + } + removeActiveCall(call) { + this.activeCalls.delete(call); + if (this.activeCalls.size === 0) { + this.session.unref(); + } + } + addActiveCall(call) { + this.activeCalls.add(call); + if (this.activeCalls.size === 1) { + this.session.ref(); + if (!this.keepaliveWithoutCalls) { + this.maybeStartKeepalivePingTimer(); + } + } + } + createCall(metadata, host, method, listener, subchannelCallStatsTracker) { + const headers = metadata.toHttp2Headers(); + headers[HTTP2_HEADER_AUTHORITY] = host; + headers[HTTP2_HEADER_USER_AGENT] = this.userAgent; + headers[HTTP2_HEADER_CONTENT_TYPE] = 'application/grpc'; + headers[HTTP2_HEADER_METHOD] = 'POST'; + headers[HTTP2_HEADER_PATH] = method; + headers[HTTP2_HEADER_TE] = 'trailers'; + let http2Stream; + /* In theory, if an error is thrown by session.request because session has + * become unusable (e.g. because it has received a goaway), this subchannel + * should soon see the corresponding close or goaway event anyway and leave + * READY. But we have seen reports that this does not happen + * (https://github.com/googleapis/nodejs-firestore/issues/1023#issuecomment-653204096) + * so for defense in depth, we just discard the session when we see an + * error here. 
+ */ + try { + http2Stream = this.session.request(headers); + } + catch (e) { + this.handleDisconnect(); + throw e; + } + this.flowControlTrace('local window size: ' + + this.session.state.localWindowSize + + ' remote window size: ' + + this.session.state.remoteWindowSize); + this.internalsTrace('session.closed=' + + this.session.closed + + ' session.destroyed=' + + this.session.destroyed + + ' session.socket.destroyed=' + + this.session.socket.destroyed); + let eventTracker; + // eslint-disable-next-line prefer-const + let call; + if (this.channelzEnabled) { + this.streamTracker.addCallStarted(); + eventTracker = { + addMessageSent: () => { + var _a; + this.messagesSent += 1; + this.lastMessageSentTimestamp = new Date(); + (_a = subchannelCallStatsTracker.addMessageSent) === null || _a === void 0 ? void 0 : _a.call(subchannelCallStatsTracker); + }, + addMessageReceived: () => { + var _a; + this.messagesReceived += 1; + this.lastMessageReceivedTimestamp = new Date(); + (_a = subchannelCallStatsTracker.addMessageReceived) === null || _a === void 0 ? void 0 : _a.call(subchannelCallStatsTracker); + }, + onCallEnd: status => { + var _a; + (_a = subchannelCallStatsTracker.onCallEnd) === null || _a === void 0 ? void 0 : _a.call(subchannelCallStatsTracker, status); + this.removeActiveCall(call); + }, + onStreamEnd: success => { + var _a; + if (success) { + this.streamTracker.addCallSucceeded(); + } + else { + this.streamTracker.addCallFailed(); + } + (_a = subchannelCallStatsTracker.onStreamEnd) === null || _a === void 0 ? void 0 : _a.call(subchannelCallStatsTracker, success); + }, + }; + } + else { + eventTracker = { + addMessageSent: () => { + var _a; + (_a = subchannelCallStatsTracker.addMessageSent) === null || _a === void 0 ? void 0 : _a.call(subchannelCallStatsTracker); + }, + addMessageReceived: () => { + var _a; + (_a = subchannelCallStatsTracker.addMessageReceived) === null || _a === void 0 ? void 0 : _a.call(subchannelCallStatsTracker); + }, + onCallEnd: status => { + var _a; + (_a = subchannelCallStatsTracker.onCallEnd) === null || _a === void 0 ? void 0 : _a.call(subchannelCallStatsTracker, status); + this.removeActiveCall(call); + }, + onStreamEnd: success => { + var _a; + (_a = subchannelCallStatsTracker.onStreamEnd) === null || _a === void 0 ? 
void 0 : _a.call(subchannelCallStatsTracker, success); + }, + }; + } + call = new subchannel_call_1.Http2SubchannelCall(http2Stream, eventTracker, listener, this, (0, call_number_1.getNextCallNumber)()); + this.addActiveCall(call); + return call; + } + getChannelzRef() { + return this.channelzRef; + } + getPeerName() { + return this.subchannelAddressString; + } + getOptions() { + return this.options; + } + shutdown() { + this.session.close(); + (0, channelz_1.unregisterChannelzRef)(this.channelzRef); + } +} +class Http2SubchannelConnector { + constructor(channelTarget) { + this.channelTarget = channelTarget; + this.session = null; + this.isShutdown = false; + } + trace(text) { + logging.trace(constants_1.LogVerbosity.DEBUG, TRACER_NAME, (0, uri_parser_1.uriToString)(this.channelTarget) + ' ' + text); + } + createSession(address, credentials, options, proxyConnectionResult) { + if (this.isShutdown) { + return Promise.reject(); + } + return new Promise((resolve, reject) => { + var _a, _b, _c, _d; + let remoteName; + if (proxyConnectionResult.realTarget) { + remoteName = (0, uri_parser_1.uriToString)(proxyConnectionResult.realTarget); + this.trace('creating HTTP/2 session through proxy to ' + + (0, uri_parser_1.uriToString)(proxyConnectionResult.realTarget)); + } + else { + remoteName = null; + this.trace('creating HTTP/2 session to ' + (0, subchannel_address_1.subchannelAddressToString)(address)); + } + const targetAuthority = (0, resolver_1.getDefaultAuthority)((_a = proxyConnectionResult.realTarget) !== null && _a !== void 0 ? _a : this.channelTarget); + let connectionOptions = credentials._getConnectionOptions(); + if (!connectionOptions) { + reject('Credentials not loaded'); + return; + } + connectionOptions.maxSendHeaderBlockLength = Number.MAX_SAFE_INTEGER; + if ('grpc-node.max_session_memory' in options) { + connectionOptions.maxSessionMemory = + options['grpc-node.max_session_memory']; + } + else { + /* By default, set a very large max session memory limit, to effectively + * disable enforcement of the limit. Some testing indicates that Node's + * behavior degrades badly when this limit is reached, so we solve that + * by disabling the check entirely. */ + connectionOptions.maxSessionMemory = Number.MAX_SAFE_INTEGER; + } + let addressScheme = 'http://'; + if ('secureContext' in connectionOptions) { + addressScheme = 'https://'; + // If provided, the value of grpc.ssl_target_name_override should be used + // to override the target hostname when checking server identity. + // This option is used for testing only. + if (options['grpc.ssl_target_name_override']) { + const sslTargetNameOverride = options['grpc.ssl_target_name_override']; + const originalCheckServerIdentity = (_b = connectionOptions.checkServerIdentity) !== null && _b !== void 0 ? _b : tls_1.checkServerIdentity; + connectionOptions.checkServerIdentity = (host, cert) => { + return originalCheckServerIdentity(sslTargetNameOverride, cert); + }; + connectionOptions.servername = sslTargetNameOverride; + } + else { + const authorityHostname = (_d = (_c = (0, uri_parser_1.splitHostPort)(targetAuthority)) === null || _c === void 0 ? void 0 : _c.host) !== null && _d !== void 0 ? _d : 'localhost'; + // We want to always set servername to support SNI + connectionOptions.servername = authorityHostname; + } + if (proxyConnectionResult.socket) { + /* This is part of the workaround for + * https://github.com/nodejs/node/issues/32922. 
Without that bug, + * proxyConnectionResult.socket would always be a plaintext socket and + * this would say + * connectionOptions.socket = proxyConnectionResult.socket; */ + connectionOptions.createConnection = (authority, option) => { + return proxyConnectionResult.socket; + }; + } + } + else { + /* In all but the most recent versions of Node, http2.connect does not use + * the options when establishing plaintext connections, so we need to + * establish that connection explicitly. */ + connectionOptions.createConnection = (authority, option) => { + if (proxyConnectionResult.socket) { + return proxyConnectionResult.socket; + } + else { + /* net.NetConnectOpts is declared in a way that is more restrictive + * than what net.connect will actually accept, so we use the type + * assertion to work around that. */ + return net.connect(address); + } + }; + } + connectionOptions = Object.assign(Object.assign(Object.assign({}, connectionOptions), address), { enableTrace: options['grpc-node.tls_enable_trace'] === 1 }); + /* http2.connect uses the options here: + * https://github.com/nodejs/node/blob/70c32a6d190e2b5d7b9ff9d5b6a459d14e8b7d59/lib/internal/http2/core.js#L3028-L3036 + * The spread operator overides earlier values with later ones, so any port + * or host values in the options will be used rather than any values extracted + * from the first argument. In addition, the path overrides the host and port, + * as documented for plaintext connections here: + * https://nodejs.org/api/net.html#net_socket_connect_options_connectlistener + * and for TLS connections here: + * https://nodejs.org/api/tls.html#tls_tls_connect_options_callback. In + * earlier versions of Node, http2.connect passes these options to + * tls.connect but not net.connect, so in the insecure case we still need + * to set the createConnection option above to create the connection + * explicitly. We cannot do that in the TLS case because http2.connect + * passes necessary additional options to tls.connect. + * The first argument just needs to be parseable as a URL and the scheme + * determines whether the connection will be established over TLS or not. + */ + const session = http2.connect(addressScheme + targetAuthority, connectionOptions); + this.session = session; + let errorMessage = 'Failed to connect'; + let reportedError = false; + session.unref(); + session.once('connect', () => { + session.removeAllListeners(); + resolve(new Http2Transport(session, address, options, remoteName)); + this.session = null; + }); + session.once('close', () => { + this.session = null; + // Leave time for error event to happen before rejecting + setImmediate(() => { + if (!reportedError) { + reportedError = true; + reject(`${errorMessage} (${new Date().toISOString()})`); + } + }); + }); + session.once('error', error => { + errorMessage = error.message; + this.trace('connection failed with error ' + errorMessage); + if (!reportedError) { + reportedError = true; + reject(`${errorMessage} (${new Date().toISOString()})`); + } + }); + }); + } + connect(address, credentials, options) { + var _a, _b, _c; + if (this.isShutdown) { + return Promise.reject(); + } + /* Pass connection options through to the proxy so that it's able to + * upgrade it's connection to support tls if needed. + * This is a workaround for https://github.com/nodejs/node/issues/32922 + * See https://github.com/grpc/grpc-node/pull/1369 for more info. 
*/ + const connectionOptions = credentials._getConnectionOptions(); + if (!connectionOptions) { + return Promise.reject('Credentials not loaded'); + } + if ('secureContext' in connectionOptions) { + connectionOptions.ALPNProtocols = ['h2']; + // If provided, the value of grpc.ssl_target_name_override should be used + // to override the target hostname when checking server identity. + // This option is used for testing only. + if (options['grpc.ssl_target_name_override']) { + const sslTargetNameOverride = options['grpc.ssl_target_name_override']; + const originalCheckServerIdentity = (_a = connectionOptions.checkServerIdentity) !== null && _a !== void 0 ? _a : tls_1.checkServerIdentity; + connectionOptions.checkServerIdentity = (host, cert) => { + return originalCheckServerIdentity(sslTargetNameOverride, cert); + }; + connectionOptions.servername = sslTargetNameOverride; + } + else { + if ('grpc.http_connect_target' in options) { + /* This is more or less how servername will be set in createSession + * if a connection is successfully established through the proxy. + * If the proxy is not used, these connectionOptions are discarded + * anyway */ + const targetPath = (0, resolver_1.getDefaultAuthority)((_b = (0, uri_parser_1.parseUri)(options['grpc.http_connect_target'])) !== null && _b !== void 0 ? _b : { + path: 'localhost', + }); + const hostPort = (0, uri_parser_1.splitHostPort)(targetPath); + connectionOptions.servername = (_c = hostPort === null || hostPort === void 0 ? void 0 : hostPort.host) !== null && _c !== void 0 ? _c : targetPath; + } + } + if (options['grpc-node.tls_enable_trace']) { + connectionOptions.enableTrace = true; + } + } + return (0, http_proxy_1.getProxiedConnection)(address, options, connectionOptions).then(result => this.createSession(address, credentials, options, result)); + } + shutdown() { + var _a; + this.isShutdown = true; + (_a = this.session) === null || _a === void 0 ? void 0 : _a.close(); + this.session = null; + } +} +exports.Http2SubchannelConnector = Http2SubchannelConnector; +//# sourceMappingURL=transport.js.map + +/***/ }), + +/***/ 65974: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright 2020 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.uriToString = exports.combineHostPort = exports.splitHostPort = exports.parseUri = void 0; +/* + * The groups correspond to URI parts as follows: + * 1. scheme + * 2. authority + * 3. 
path + */ +const URI_REGEX = /^(?:([A-Za-z0-9+.-]+):)?(?:\/\/([^/]*)\/)?(.+)$/; +function parseUri(uriString) { + const parsedUri = URI_REGEX.exec(uriString); + if (parsedUri === null) { + return null; + } + return { + scheme: parsedUri[1], + authority: parsedUri[2], + path: parsedUri[3], + }; +} +exports.parseUri = parseUri; +const NUMBER_REGEX = /^\d+$/; +function splitHostPort(path) { + if (path.startsWith('[')) { + const hostEnd = path.indexOf(']'); + if (hostEnd === -1) { + return null; + } + const host = path.substring(1, hostEnd); + /* Only an IPv6 address should be in bracketed notation, and an IPv6 + * address should have at least one colon */ + if (host.indexOf(':') === -1) { + return null; + } + if (path.length > hostEnd + 1) { + if (path[hostEnd + 1] === ':') { + const portString = path.substring(hostEnd + 2); + if (NUMBER_REGEX.test(portString)) { + return { + host: host, + port: +portString, + }; + } + else { + return null; + } + } + else { + return null; + } + } + else { + return { + host, + }; + } + } + else { + const splitPath = path.split(':'); + /* Exactly one colon means that this is host:port. Zero colons means that + * there is no port. And multiple colons means that this is a bare IPv6 + * address with no port */ + if (splitPath.length === 2) { + if (NUMBER_REGEX.test(splitPath[1])) { + return { + host: splitPath[0], + port: +splitPath[1], + }; + } + else { + return null; + } + } + else { + return { + host: path, + }; + } + } +} +exports.splitHostPort = splitHostPort; +function combineHostPort(hostPort) { + if (hostPort.port === undefined) { + return hostPort.host; + } + else { + // Only an IPv6 host should include a colon + if (hostPort.host.includes(':')) { + return `[${hostPort.host}]:${hostPort.port}`; + } + else { + return `${hostPort.host}:${hostPort.port}`; + } + } +} +exports.combineHostPort = combineHostPort; +function uriToString(uri) { + let result = ''; + if (uri.scheme !== undefined) { + result += uri.scheme + ':'; + } + if (uri.authority !== undefined) { + result += '//' + uri.authority + '/'; + } + result += uri.path; + return result; +} +exports.uriToString = uriToString; +//# sourceMappingURL=uri-parser.js.map + +/***/ }), + +/***/ 98171: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadFileDescriptorSetFromObject = exports.loadFileDescriptorSetFromBuffer = exports.fromJSON = exports.loadSync = exports.load = exports.IdempotencyLevel = exports.isAnyExtension = exports.Long = void 0; +const camelCase = __nccwpck_require__(7994); +const Protobuf = __nccwpck_require__(85881); +const descriptor = __nccwpck_require__(21629); +const util_1 = __nccwpck_require__(13245); +const Long = __nccwpck_require__(52694); +exports.Long = Long; +function isAnyExtension(obj) { + return ('@type' in obj) && (typeof obj['@type'] === 'string'); +} +exports.isAnyExtension = isAnyExtension; +var IdempotencyLevel; +(function (IdempotencyLevel) { + IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = "IDEMPOTENCY_UNKNOWN"; + IdempotencyLevel["NO_SIDE_EFFECTS"] = "NO_SIDE_EFFECTS"; + IdempotencyLevel["IDEMPOTENT"] = "IDEMPOTENT"; +})(IdempotencyLevel = exports.IdempotencyLevel || (exports.IdempotencyLevel = {})); +const descriptorOptions = { + longs: String, + enums: String, + bytes: String, + defaults: true, + oneofs: true, + json: true, +}; +function joinName(baseName, name) { + if (baseName === '') { + return name; + } + else { + return baseName + '.' + name; + } +} +function isHandledReflectionObject(obj) { + return (obj instanceof Protobuf.Service || + obj instanceof Protobuf.Type || + obj instanceof Protobuf.Enum); +} +function isNamespaceBase(obj) { + return obj instanceof Protobuf.Namespace || obj instanceof Protobuf.Root; +} +function getAllHandledReflectionObjects(obj, parentName) { + const objName = joinName(parentName, obj.name); + if (isHandledReflectionObject(obj)) { + return [[objName, obj]]; + } + else { + if (isNamespaceBase(obj) && typeof obj.nested !== 'undefined') { + return Object.keys(obj.nested) + .map(name => { + return getAllHandledReflectionObjects(obj.nested[name], objName); + }) + .reduce((accumulator, currentValue) => accumulator.concat(currentValue), []); + } + } + return []; +} +function createDeserializer(cls, options) { + return function deserialize(argBuf) { + return cls.toObject(cls.decode(argBuf), options); + }; +} +function createSerializer(cls) { + return function serialize(arg) { + if (Array.isArray(arg)) { + throw new Error(`Failed to serialize message: expected object with ${cls.name} structure, got array instead`); + } + const message = cls.fromObject(arg); + return cls.encode(message).finish(); + }; +} +function mapMethodOptions(options) { + return (options || []).reduce((obj, item) => { + for (const [key, value] of Object.entries(item)) { + switch (key) { + case 'uninterpreted_option': + obj.uninterpreted_option.push(item.uninterpreted_option); + break; + default: + obj[key] = value; + } + } + return obj; + }, { + deprecated: false, + idempotency_level: IdempotencyLevel.IDEMPOTENCY_UNKNOWN, + uninterpreted_option: [], + }); +} +function createMethodDefinition(method, serviceName, options, fileDescriptors) { + /* This is only ever called after the corresponding root.resolveAll(), so we + * can assume that the resolved request and response types are non-null */ + const requestType = method.resolvedRequestType; + const responseType = method.resolvedResponseType; + return { + path: '/' + serviceName + '/' + method.name, + requestStream: !!method.requestStream, + responseStream: !!method.responseStream, + requestSerialize: createSerializer(requestType), + requestDeserialize: createDeserializer(requestType, options), + responseSerialize: createSerializer(responseType), + 
responseDeserialize: createDeserializer(responseType, options), + // TODO(murgatroid99): Find a better way to handle this + originalName: camelCase(method.name), + requestType: createMessageDefinition(requestType, fileDescriptors), + responseType: createMessageDefinition(responseType, fileDescriptors), + options: mapMethodOptions(method.parsedOptions), + }; +} +function createServiceDefinition(service, name, options, fileDescriptors) { + const def = {}; + for (const method of service.methodsArray) { + def[method.name] = createMethodDefinition(method, name, options, fileDescriptors); + } + return def; +} +function createMessageDefinition(message, fileDescriptors) { + const messageDescriptor = message.toDescriptor('proto3'); + return { + format: 'Protocol Buffer 3 DescriptorProto', + type: messageDescriptor.$type.toObject(messageDescriptor, descriptorOptions), + fileDescriptorProtos: fileDescriptors, + }; +} +function createEnumDefinition(enumType, fileDescriptors) { + const enumDescriptor = enumType.toDescriptor('proto3'); + return { + format: 'Protocol Buffer 3 EnumDescriptorProto', + type: enumDescriptor.$type.toObject(enumDescriptor, descriptorOptions), + fileDescriptorProtos: fileDescriptors, + }; +} +/** + * function createDefinition(obj: Protobuf.Service, name: string, options: + * Options): ServiceDefinition; function createDefinition(obj: Protobuf.Type, + * name: string, options: Options): MessageTypeDefinition; function + * createDefinition(obj: Protobuf.Enum, name: string, options: Options): + * EnumTypeDefinition; + */ +function createDefinition(obj, name, options, fileDescriptors) { + if (obj instanceof Protobuf.Service) { + return createServiceDefinition(obj, name, options, fileDescriptors); + } + else if (obj instanceof Protobuf.Type) { + return createMessageDefinition(obj, fileDescriptors); + } + else if (obj instanceof Protobuf.Enum) { + return createEnumDefinition(obj, fileDescriptors); + } + else { + throw new Error('Type mismatch in reflection object handling'); + } +} +function createPackageDefinition(root, options) { + const def = {}; + root.resolveAll(); + const descriptorList = root.toDescriptor('proto3').file; + const bufferList = descriptorList.map(value => Buffer.from(descriptor.FileDescriptorProto.encode(value).finish())); + for (const [name, obj] of getAllHandledReflectionObjects(root, '')) { + def[name] = createDefinition(obj, name, options, bufferList); + } + return def; +} +function createPackageDefinitionFromDescriptorSet(decodedDescriptorSet, options) { + options = options || {}; + const root = Protobuf.Root.fromDescriptor(decodedDescriptorSet); + root.resolveAll(); + return createPackageDefinition(root, options); +} +/** + * Load a .proto file with the specified options. + * @param filename One or multiple file paths to load. Can be an absolute path + * or relative to an include path. + * @param options.keepCase Preserve field names. The default is to change them + * to camel case. + * @param options.longs The type that should be used to represent `long` values. + * Valid options are `Number` and `String`. Defaults to a `Long` object type + * from a library. + * @param options.enums The type that should be used to represent `enum` values. + * The only valid option is `String`. Defaults to the numeric value. + * @param options.bytes The type that should be used to represent `bytes` + * values. Valid options are `Array` and `String`. The default is to use + * `Buffer`. + * @param options.defaults Set default values on output objects. Defaults to + * `false`. 
+ * @param options.arrays Set empty arrays for missing array values even if + * `defaults` is `false`. Defaults to `false`. + * @param options.objects Set empty objects for missing object values even if + * `defaults` is `false`. Defaults to `false`. + * @param options.oneofs Set virtual oneof properties to the present field's + * name + * @param options.json Represent Infinity and NaN as strings in float fields, + * and automatically decode google.protobuf.Any values. + * @param options.includeDirs Paths to search for imported `.proto` files. + */ +function load(filename, options) { + return (0, util_1.loadProtosWithOptions)(filename, options).then(loadedRoot => { + return createPackageDefinition(loadedRoot, options); + }); +} +exports.load = load; +function loadSync(filename, options) { + const loadedRoot = (0, util_1.loadProtosWithOptionsSync)(filename, options); + return createPackageDefinition(loadedRoot, options); +} +exports.loadSync = loadSync; +function fromJSON(json, options) { + options = options || {}; + const loadedRoot = Protobuf.Root.fromJSON(json); + loadedRoot.resolveAll(); + return createPackageDefinition(loadedRoot, options); +} +exports.fromJSON = fromJSON; +function loadFileDescriptorSetFromBuffer(descriptorSet, options) { + const decodedDescriptorSet = descriptor.FileDescriptorSet.decode(descriptorSet); + return createPackageDefinitionFromDescriptorSet(decodedDescriptorSet, options); +} +exports.loadFileDescriptorSetFromBuffer = loadFileDescriptorSetFromBuffer; +function loadFileDescriptorSetFromObject(descriptorSet, options) { + const decodedDescriptorSet = descriptor.FileDescriptorSet.fromObject(descriptorSet); + return createPackageDefinitionFromDescriptorSet(decodedDescriptorSet, options); +} +exports.loadFileDescriptorSetFromObject = loadFileDescriptorSetFromObject; +(0, util_1.addCommonProtos)(); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 13245: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.addCommonProtos = exports.loadProtosWithOptionsSync = exports.loadProtosWithOptions = void 0; +const fs = __nccwpck_require__(57147); +const path = __nccwpck_require__(71017); +const Protobuf = __nccwpck_require__(85881); +function addIncludePathResolver(root, includePaths) { + const originalResolvePath = root.resolvePath; + root.resolvePath = (origin, target) => { + if (path.isAbsolute(target)) { + return target; + } + for (const directory of includePaths) { + const fullPath = path.join(directory, target); + try { + fs.accessSync(fullPath, fs.constants.R_OK); + return fullPath; + } + catch (err) { + continue; + } + } + process.emitWarning(`${target} not found in any of the include paths ${includePaths}`); + return originalResolvePath(origin, target); + }; +} +async function loadProtosWithOptions(filename, options) { + const root = new Protobuf.Root(); + options = options || {}; + if (!!options.includeDirs) { + if (!Array.isArray(options.includeDirs)) { + return Promise.reject(new Error('The includeDirs option must be an array')); + } + addIncludePathResolver(root, options.includeDirs); + } + const loadedRoot = await root.load(filename, options); + loadedRoot.resolveAll(); + return loadedRoot; +} +exports.loadProtosWithOptions = loadProtosWithOptions; +function loadProtosWithOptionsSync(filename, options) { + const root = new Protobuf.Root(); + options = options || {}; + if (!!options.includeDirs) { + if (!Array.isArray(options.includeDirs)) { + throw new Error('The includeDirs option must be an array'); + } + addIncludePathResolver(root, options.includeDirs); + } + const loadedRoot = root.loadSync(filename, options); + loadedRoot.resolveAll(); + return loadedRoot; +} +exports.loadProtosWithOptionsSync = loadProtosWithOptionsSync; +/** + * Load Google's well-known proto files that aren't exposed by Protobuf.js. + */ +function addCommonProtos() { + // Protobuf.js exposes: any, duration, empty, field_mask, struct, timestamp, + // and wrappers. compiler/plugin is excluded in Protobuf.js and here. 
+ // Using constant strings for compatibility with tools like Webpack + const apiDescriptor = __nccwpck_require__(44784); + const descriptorDescriptor = __nccwpck_require__(43571); + const sourceContextDescriptor = __nccwpck_require__(73342); + const typeDescriptor = __nccwpck_require__(58783); + Protobuf.common('api', apiDescriptor.nested.google.nested.protobuf.nested); + Protobuf.common('descriptor', descriptorDescriptor.nested.google.nested.protobuf.nested); + Protobuf.common('source_context', sourceContextDescriptor.nested.google.nested.protobuf.nested); + Protobuf.common('type', typeDescriptor.nested.google.nested.protobuf.nested); +} +exports.addCommonProtos = addCommonProtos; +//# sourceMappingURL=util.js.map + +/***/ }), + +/***/ 12592: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "t", ({ + value: true +})); + +class TreeNode { + constructor(t, e, s = 1) { + this.i = undefined; + this.h = undefined; + this.o = undefined; + this.u = t; + this.l = e; + this.p = s; + } + I() { + let t = this; + const e = t.o.o === t; + if (e && t.p === 1) { + t = t.h; + } else if (t.i) { + t = t.i; + while (t.h) { + t = t.h; + } + } else { + if (e) { + return t.o; + } + let s = t.o; + while (s.i === t) { + t = s; + s = t.o; + } + t = s; + } + return t; + } + B() { + let t = this; + if (t.h) { + t = t.h; + while (t.i) { + t = t.i; + } + return t; + } else { + let e = t.o; + while (e.h === t) { + t = e; + e = t.o; + } + if (t.h !== e) { + return e; + } else return t; + } + } + _() { + const t = this.o; + const e = this.h; + const s = e.i; + if (t.o === this) t.o = e; else if (t.i === this) t.i = e; else t.h = e; + e.o = t; + e.i = this; + this.o = e; + this.h = s; + if (s) s.o = this; + return e; + } + g() { + const t = this.o; + const e = this.i; + const s = e.h; + if (t.o === this) t.o = e; else if (t.i === this) t.i = e; else t.h = e; + e.o = t; + e.h = this; + this.o = e; + this.i = s; + if (s) s.o = this; + return e; + } +} + +class TreeNodeEnableIndex extends TreeNode { + constructor() { + super(...arguments); + this.M = 1; + } + _() { + const t = super._(); + this.O(); + t.O(); + return t; + } + g() { + const t = super.g(); + this.O(); + t.O(); + return t; + } + O() { + this.M = 1; + if (this.i) { + this.M += this.i.M; + } + if (this.h) { + this.M += this.h.M; + } + } +} + +class ContainerIterator { + constructor(t = 0) { + this.iteratorType = t; + } + equals(t) { + return this.T === t.T; + } +} + +class Base { + constructor() { + this.m = 0; + } + get length() { + return this.m; + } + size() { + return this.m; + } + empty() { + return this.m === 0; + } +} + +class Container extends Base {} + +function throwIteratorAccessError() { + throw new RangeError("Iterator access denied!"); +} + +class TreeContainer extends Container { + constructor(t = function(t, e) { + if (t < e) return -1; + if (t > e) return 1; + return 0; + }, e = false) { + super(); + this.v = undefined; + this.A = t; + this.enableIndex = e; + this.N = e ? 
TreeNodeEnableIndex : TreeNode; + this.C = new this.N; + } + R(t, e) { + let s = this.C; + while (t) { + const i = this.A(t.u, e); + if (i < 0) { + t = t.h; + } else if (i > 0) { + s = t; + t = t.i; + } else return t; + } + return s; + } + K(t, e) { + let s = this.C; + while (t) { + const i = this.A(t.u, e); + if (i <= 0) { + t = t.h; + } else { + s = t; + t = t.i; + } + } + return s; + } + L(t, e) { + let s = this.C; + while (t) { + const i = this.A(t.u, e); + if (i < 0) { + s = t; + t = t.h; + } else if (i > 0) { + t = t.i; + } else return t; + } + return s; + } + k(t, e) { + let s = this.C; + while (t) { + const i = this.A(t.u, e); + if (i < 0) { + s = t; + t = t.h; + } else { + t = t.i; + } + } + return s; + } + P(t) { + while (true) { + const e = t.o; + if (e === this.C) return; + if (t.p === 1) { + t.p = 0; + return; + } + if (t === e.i) { + const s = e.h; + if (s.p === 1) { + s.p = 0; + e.p = 1; + if (e === this.v) { + this.v = e._(); + } else e._(); + } else { + if (s.h && s.h.p === 1) { + s.p = e.p; + e.p = 0; + s.h.p = 0; + if (e === this.v) { + this.v = e._(); + } else e._(); + return; + } else if (s.i && s.i.p === 1) { + s.p = 1; + s.i.p = 0; + s.g(); + } else { + s.p = 1; + t = e; + } + } + } else { + const s = e.i; + if (s.p === 1) { + s.p = 0; + e.p = 1; + if (e === this.v) { + this.v = e.g(); + } else e.g(); + } else { + if (s.i && s.i.p === 1) { + s.p = e.p; + e.p = 0; + s.i.p = 0; + if (e === this.v) { + this.v = e.g(); + } else e.g(); + return; + } else if (s.h && s.h.p === 1) { + s.p = 1; + s.h.p = 0; + s._(); + } else { + s.p = 1; + t = e; + } + } + } + } + } + S(t) { + if (this.m === 1) { + this.clear(); + return; + } + let e = t; + while (e.i || e.h) { + if (e.h) { + e = e.h; + while (e.i) e = e.i; + } else { + e = e.i; + } + const s = t.u; + t.u = e.u; + e.u = s; + const i = t.l; + t.l = e.l; + e.l = i; + t = e; + } + if (this.C.i === e) { + this.C.i = e.o; + } else if (this.C.h === e) { + this.C.h = e.o; + } + this.P(e); + let s = e.o; + if (e === s.i) { + s.i = undefined; + } else s.h = undefined; + this.m -= 1; + this.v.p = 0; + if (this.enableIndex) { + while (s !== this.C) { + s.M -= 1; + s = s.o; + } + } + } + U(t) { + const e = typeof t === "number" ? t : undefined; + const s = typeof t === "function" ? t : undefined; + const i = typeof t === "undefined" ? 
[] : undefined; + let r = 0; + let n = this.v; + const h = []; + while (h.length || n) { + if (n) { + h.push(n); + n = n.i; + } else { + n = h.pop(); + if (r === e) return n; + i && i.push(n); + s && s(n, r, this); + r += 1; + n = n.h; + } + } + return i; + } + j(t) { + while (true) { + const e = t.o; + if (e.p === 0) return; + const s = e.o; + if (e === s.i) { + const i = s.h; + if (i && i.p === 1) { + i.p = e.p = 0; + if (s === this.v) return; + s.p = 1; + t = s; + continue; + } else if (t === e.h) { + t.p = 0; + if (t.i) { + t.i.o = e; + } + if (t.h) { + t.h.o = s; + } + e.h = t.i; + s.i = t.h; + t.i = e; + t.h = s; + if (s === this.v) { + this.v = t; + this.C.o = t; + } else { + const e = s.o; + if (e.i === s) { + e.i = t; + } else e.h = t; + } + t.o = s.o; + e.o = t; + s.o = t; + s.p = 1; + } else { + e.p = 0; + if (s === this.v) { + this.v = s.g(); + } else s.g(); + s.p = 1; + return; + } + } else { + const i = s.i; + if (i && i.p === 1) { + i.p = e.p = 0; + if (s === this.v) return; + s.p = 1; + t = s; + continue; + } else if (t === e.i) { + t.p = 0; + if (t.i) { + t.i.o = s; + } + if (t.h) { + t.h.o = e; + } + s.h = t.i; + e.i = t.h; + t.i = s; + t.h = e; + if (s === this.v) { + this.v = t; + this.C.o = t; + } else { + const e = s.o; + if (e.i === s) { + e.i = t; + } else e.h = t; + } + t.o = s.o; + e.o = t; + s.o = t; + s.p = 1; + } else { + e.p = 0; + if (s === this.v) { + this.v = s._(); + } else s._(); + s.p = 1; + return; + } + } + if (this.enableIndex) { + e.O(); + s.O(); + t.O(); + } + return; + } + } + q(t, e, s) { + if (this.v === undefined) { + this.m += 1; + this.v = new this.N(t, e, 0); + this.v.o = this.C; + this.C.o = this.C.i = this.C.h = this.v; + return this.m; + } + let i; + const r = this.C.i; + const n = this.A(r.u, t); + if (n === 0) { + r.l = e; + return this.m; + } else if (n > 0) { + r.i = new this.N(t, e); + r.i.o = r; + i = r.i; + this.C.i = i; + } else { + const r = this.C.h; + const n = this.A(r.u, t); + if (n === 0) { + r.l = e; + return this.m; + } else if (n < 0) { + r.h = new this.N(t, e); + r.h.o = r; + i = r.h; + this.C.h = i; + } else { + if (s !== undefined) { + const r = s.T; + if (r !== this.C) { + const s = this.A(r.u, t); + if (s === 0) { + r.l = e; + return this.m; + } else if (s > 0) { + const s = r.I(); + const n = this.A(s.u, t); + if (n === 0) { + s.l = e; + return this.m; + } else if (n < 0) { + i = new this.N(t, e); + if (s.h === undefined) { + s.h = i; + i.o = s; + } else { + r.i = i; + i.o = r; + } + } + } + } + } + if (i === undefined) { + i = this.v; + while (true) { + const s = this.A(i.u, t); + if (s > 0) { + if (i.i === undefined) { + i.i = new this.N(t, e); + i.i.o = i; + i = i.i; + break; + } + i = i.i; + } else if (s < 0) { + if (i.h === undefined) { + i.h = new this.N(t, e); + i.h.o = i; + i = i.h; + break; + } + i = i.h; + } else { + i.l = e; + return this.m; + } + } + } + } + } + if (this.enableIndex) { + let t = i.o; + while (t !== this.C) { + t.M += 1; + t = t.o; + } + } + this.j(i); + this.m += 1; + return this.m; + } + H(t, e) { + while (t) { + const s = this.A(t.u, e); + if (s < 0) { + t = t.h; + } else if (s > 0) { + t = t.i; + } else return t; + } + return t || this.C; + } + clear() { + this.m = 0; + this.v = undefined; + this.C.o = undefined; + this.C.i = this.C.h = undefined; + } + updateKeyByIterator(t, e) { + const s = t.T; + if (s === this.C) { + throwIteratorAccessError(); + } + if (this.m === 1) { + s.u = e; + return true; + } + const i = s.B().u; + if (s === this.C.i) { + if (this.A(i, e) > 0) { + s.u = e; + 
return true; + } + return false; + } + const r = s.I().u; + if (s === this.C.h) { + if (this.A(r, e) < 0) { + s.u = e; + return true; + } + return false; + } + if (this.A(r, e) >= 0 || this.A(i, e) <= 0) return false; + s.u = e; + return true; + } + eraseElementByPos(t) { + if (t < 0 || t > this.m - 1) { + throw new RangeError; + } + const e = this.U(t); + this.S(e); + return this.m; + } + eraseElementByKey(t) { + if (this.m === 0) return false; + const e = this.H(this.v, t); + if (e === this.C) return false; + this.S(e); + return true; + } + eraseElementByIterator(t) { + const e = t.T; + if (e === this.C) { + throwIteratorAccessError(); + } + const s = e.h === undefined; + const i = t.iteratorType === 0; + if (i) { + if (s) t.next(); + } else { + if (!s || e.i === undefined) t.next(); + } + this.S(e); + return t; + } + getHeight() { + if (this.m === 0) return 0; + function traversal(t) { + if (!t) return 0; + return Math.max(traversal(t.i), traversal(t.h)) + 1; + } + return traversal(this.v); + } +} + +class TreeIterator extends ContainerIterator { + constructor(t, e, s) { + super(s); + this.T = t; + this.C = e; + if (this.iteratorType === 0) { + this.pre = function() { + if (this.T === this.C.i) { + throwIteratorAccessError(); + } + this.T = this.T.I(); + return this; + }; + this.next = function() { + if (this.T === this.C) { + throwIteratorAccessError(); + } + this.T = this.T.B(); + return this; + }; + } else { + this.pre = function() { + if (this.T === this.C.h) { + throwIteratorAccessError(); + } + this.T = this.T.B(); + return this; + }; + this.next = function() { + if (this.T === this.C) { + throwIteratorAccessError(); + } + this.T = this.T.I(); + return this; + }; + } + } + get index() { + let t = this.T; + const e = this.C.o; + if (t === this.C) { + if (e) { + return e.M - 1; + } + return 0; + } + let s = 0; + if (t.i) { + s += t.i.M; + } + while (t !== e) { + const e = t.o; + if (t === e.h) { + s += 1; + if (e.i) { + s += e.i.M; + } + } + t = e; + } + return s; + } + isAccessible() { + return this.T !== this.C; + } +} + +class OrderedMapIterator extends TreeIterator { + constructor(t, e, s, i) { + super(t, e, i); + this.container = s; + } + get pointer() { + if (this.T === this.C) { + throwIteratorAccessError(); + } + const t = this; + return new Proxy([], { + get(e, s) { + if (s === "0") return t.T.u; else if (s === "1") return t.T.l; + e[0] = t.T.u; + e[1] = t.T.l; + return e[s]; + }, + set(e, s, i) { + if (s !== "1") { + throw new TypeError("prop must be 1"); + } + t.T.l = i; + return true; + } + }); + } + copy() { + return new OrderedMapIterator(this.T, this.C, this.container, this.iteratorType); + } +} + +class OrderedMap extends TreeContainer { + constructor(t = [], e, s) { + super(e, s); + const i = this; + t.forEach((function(t) { + i.setElement(t[0], t[1]); + })); + } + begin() { + return new OrderedMapIterator(this.C.i || this.C, this.C, this); + } + end() { + return new OrderedMapIterator(this.C, this.C, this); + } + rBegin() { + return new OrderedMapIterator(this.C.h || this.C, this.C, this, 1); + } + rEnd() { + return new OrderedMapIterator(this.C, this.C, this, 1); + } + front() { + if (this.m === 0) return; + const t = this.C.i; + return [ t.u, t.l ]; + } + back() { + if (this.m === 0) return; + const t = this.C.h; + return [ t.u, t.l ]; + } + lowerBound(t) { + const e = this.R(this.v, t); + return new OrderedMapIterator(e, this.C, this); + } + upperBound(t) { + const e = this.K(this.v, t); + return new OrderedMapIterator(e, this.C, this); + } + 
reverseLowerBound(t) { + const e = this.L(this.v, t); + return new OrderedMapIterator(e, this.C, this); + } + reverseUpperBound(t) { + const e = this.k(this.v, t); + return new OrderedMapIterator(e, this.C, this); + } + forEach(t) { + this.U((function(e, s, i) { + t([ e.u, e.l ], s, i); + })); + } + setElement(t, e, s) { + return this.q(t, e, s); + } + getElementByPos(t) { + if (t < 0 || t > this.m - 1) { + throw new RangeError; + } + const e = this.U(t); + return [ e.u, e.l ]; + } + find(t) { + const e = this.H(this.v, t); + return new OrderedMapIterator(e, this.C, this); + } + getElementByKey(t) { + const e = this.H(this.v, t); + return e.l; + } + union(t) { + const e = this; + t.forEach((function(t) { + e.setElement(t[0], t[1]); + })); + return this.m; + } + * [Symbol.iterator]() { + const t = this.m; + const e = this.U(); + for (let s = 0; s < t; ++s) { + const t = e[s]; + yield [ t.u, t.l ]; + } + } +} + +exports.OrderedMap = OrderedMap; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 40334: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} + +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} + +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 76762: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(45030); +var import_before_after_hook = __nccwpck_require__(83682); +var import_request = __nccwpck_require__(6039); +var import_graphql = __nccwpck_require__(88467); +var import_auth_token = __nccwpck_require__(40334); + +// pkg/dist-src/version.js +var VERSION = "5.0.2"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 6039: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(59440); +var import_universal_user_agent = __nccwpck_require__(45030); + +// pkg/dist-src/version.js +var VERSION = "8.1.6"; + +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(10537); + +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + var _a, _b, _c; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. 
+ // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + return `Unknown error: ${JSON.stringify(data)}`; +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: 
withDefaults.bind(null, endpoint2) + }); +} + +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } +}); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 59440: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(45030); + +// pkg/dist-src/version.js +var VERSION = "9.0.5"; + +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; + +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} + +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; +} + +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? 
{ method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; +} + +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; +} + +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } +} + +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); +} + +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); +} + +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 88467: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(36234); +var import_universal_user_agent = __nccwpck_require__(45030); + +// pkg/dist-src/version.js +var VERSION = "7.1.0"; + +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(36234); + +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(36234); + +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + 
Error.captureStackTrace(this, this.constructor); + } + } +}; + +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } + } + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} + +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 64193: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () 
=> paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "9.1.5"; + +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; +} + +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); +} + +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { + iterator +}); + +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET 
/orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET 
/repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET 
/users/{username}/starred", + "GET /users/{username}/subscriptions" +]; + +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +// pkg/dist-src/index.js +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 83044: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "10.2.0"; + +// pkg/dist-src/generated/endpoints.js +var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + 
deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ], - enableSelectedRepositoryGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET 
/repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" 
+ ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET 
/repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + 
listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET 
/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: 
"org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + copilot: { + addCopilotForBusinessSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotForBusinessSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET 
/orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + 
setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: [ + "DELETE 
/repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT 
/orgs/{org}/outside_collaborators/{username}" + ], + createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" ], - enableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT 
/orgs/{org}/public_members/{username}" ], - forceCancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" ], - generateRunnerJitconfigForOrg: [ - "POST /orgs/{org}/actions/runners/generate-jitconfig" + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" ], - generateRunnerJitconfigForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" ], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: [ - "GET /orgs/{org}/actions/cache/usage-by-repository" + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" ], - getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/selected-actions" + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" ], - getAllowedActionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], - getEnvironmentSecret: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } ], - getEnvironmentVariable: [ - "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } ], - getGithubActionsDefaultWorkflowPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions/workflow" + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" ], - getGithubActionsDefaultWorkflowPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/workflow" + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" ], - getGithubActionsPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions" + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" ], - 
getGithubActionsPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions" + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" ], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], - getPendingDeploymentsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" ], - getRepoPermissions: [ - "GET /repos/{owner}/{repo}/actions/permissions", - {}, - { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" ], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], - getReviewsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" ], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/access" + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" ], - getWorkflowRunUsage: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" ], - getWorkflowUsage: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" ], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" ], - listEnvironmentVariables: [ - "GET /repositories/{repository_id}/environments/{environment_name}/variables" + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" ], - listJobsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" ], - listJobsForWorkflowRunAttempt: [ - "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ], - listLabelsForSelfHostedRunnerForOrg: [ - "GET /orgs/{org}/actions/runners/{runner_id}/labels" + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ], - listLabelsForSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" ], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listOrgVariables: ["GET /orgs/{org}/actions/variables"], - listRepoOrganizationSecrets: [ - "GET /repos/{owner}/{repo}/actions/organization-secrets" + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" ], - listRepoOrganizationVariables: [ - "GET /repos/{owner}/{repo}/actions/organization-variables" + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" ], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/downloads" + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" ], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ], - listSelectedReposForOrgVariable: [ - "GET 
/orgs/{org}/actions/variables/{name}/repositories" + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" ], - listSelectedRepositoriesEnabledGithubActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/repositories" + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" ], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ], - listWorkflowRuns: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" ], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" ], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ], - removeCustomLabelFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" ], - removeCustomLabelFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ], - removeSelectedRepoFromOrgVariable: [ - "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" ], - reviewCustomGatesForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + 
createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" ], - reviewPendingDeploymentsForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" ], - setAllowedActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/selected-actions" + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" ], - setAllowedActionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" ], - setCustomLabelsForSelfHostedRunnerForOrg: [ - "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" ], - setCustomLabelsForSelfHostedRunnerForRepo: [ - "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" ], - setGithubActionsDefaultWorkflowPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/workflow" + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" ], - setGithubActionsDefaultWorkflowPermissionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" ], - setGithubActionsPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions" + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" ], - setGithubActionsPermissionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions" + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" ], - setSelectedReposForOrgVariable: [ - "PUT /orgs/{org}/actions/variables/{name}/repositories" + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" ], - setSelectedRepositoriesEnabledGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories" + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" ], - setWorkflowAccessToRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/access" + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" ], - updateEnvironmentVariable: [ - "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" ], - updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], - updateRepoVariable: [ - "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + 
listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" ] }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: [ - "DELETE /notifications/threads/{thread_id}/subscription" - ], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: [ - "GET /notifications/threads/{thread_id}/subscription" + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } ], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: [ - "GET /users/{username}/events/orgs/{org}" + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" ], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: [ - "GET /users/{username}/received_events/public" + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } ], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/notifications" + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } ], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: [ - "PUT /notifications/threads/{thread_id}/subscription" + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } ], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}", + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, - { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + { mapToData: "users" } ], - 
addRepoToInstallationForAuthenticatedUser: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}" + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" ], - checkToken: ["POST /applications/{client_id}/token"], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: [ - "POST /app/installations/{installation_id}/access_tokens" + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" ], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: [ - "GET /marketplace_listing/accounts/{account_id}" + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" ], - getSubscriptionPlanForAccountStubbed: [ - "GET /marketplace_listing/stubbed/accounts/{account_id}" + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" ], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: [ - "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" ], - listInstallationReposForAuthenticatedUser: [ - "GET /user/installations/{installation_id}/repositories" + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" ], - listInstallationRequestsForAuthenticatedApp: [ - "GET /app/installation-requests" + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" ], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: [ - "GET /user/marketplace_purchases/stubbed" + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ], - listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: [ - "POST /app/hook/deliveries/{delivery_id}/attempts" + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + 
createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" ], - removeRepoFromInstallation: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", {}, - { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } ], - removeRepoFromInstallationForAuthenticatedUser: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" ], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: [ - "DELETE /app/installations/{installation_id}/suspended" + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ], - updateWebhookConfigForApp: ["PATCH /app/hook/config"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: [ - "GET /users/{username}/settings/billing/actions" + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: [ - "GET /users/{username}/settings/billing/packages" + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" ], - getSharedStorageBillingOrg: [ - "GET /orgs/{org}/settings/billing/shared-storage" + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" ], - getSharedStorageBillingUser: [ - "GET /users/{username}/settings/billing/shared-storage" - ] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: [ - "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" ], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: [ - "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + deleteDeployKey: ["DELETE 
/repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" ], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: [ - "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" ], - rerequestSuite: [ - "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" ], - setSuitesPreferences: [ - "PATCH /repos/{owner}/{repo}/check-suites/preferences" + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] - }, - codeScanning: { - deleteAnalysis: [ - "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" ], - getAlert: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", - {}, - { renamedParameters: { alert_id: "alert_number" } } + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" ], - getAnalysis: [ - "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" ], - getCodeqlDatabase: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" ], - getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" ], - listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", {}, - { renamed: ["codeScanning", "listAlertInstances"] } + { renamed: ["repos", "downloadZipballArchive"] } ], - listCodeqlDatabases: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" ], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + 
enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" ], - updateDefaultSetup: [ - "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" ], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct"], - getConductCode: ["GET /codes_of_conduct/{key}"] - }, - codespaces: { - addRepositoryForSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" ], - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ], - checkPermissionsForDevcontainer: [ - "GET /repos/{owner}/{repo}/codespaces/permissions_check" + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ], - codespaceMachinesForAuthenticatedUser: [ - "GET /user/codespaces/{codespace_name}/machines" + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" ], - createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ], - createOrUpdateSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}" + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" ], - createWithPrForAuthenticatedUser: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" ], - createWithRepoForAuthenticatedUser: [ - "POST /repos/{owner}/{repo}/codespaces" + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" ], - deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: [ - "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET 
/repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" ], - deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" ], - deleteSecretForAuthenticatedUser: [ - "DELETE /user/codespaces/secrets/{secret_name}" + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" ], - exportForAuthenticatedUser: [ - "POST /user/codespaces/{codespace_name}/exports" + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" ], - getCodespacesForUserInOrg: [ - "GET /orgs/{org}/members/{username}/codespaces" + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ], - getExportDetailsForAuthenticatedUser: [ - "GET /user/codespaces/{codespace_name}/exports/{export_id}" + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" ], - getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], - getPublicKeyForAuthenticatedUser: [ - "GET /user/codespaces/secrets/public-key" + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ], - getRepoPublicKey: [ - "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ], - getRepoSecret: [ - "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET 
/repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ], - getSecretForAuthenticatedUser: [ - "GET /user/codespaces/secrets/{secret_name}" + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" ], - listDevcontainersInRepositoryForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/devcontainers" + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" ], - listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: [ - "GET /orgs/{org}/codespaces", + listActivities: ["GET /repos/{owner}/{repo}/activity"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, - { renamedParameters: { org_id: "org" } } + { mapToData: "apps" } ], - listInRepositoryForAuthenticatedUser: [ - "GET 
/repos/{owner}/{repo}/codespaces" + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" ], - listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: [ - "GET /user/codespaces/secrets/{secret_name}/repositories" + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } ], - listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ], - preFlightWithRepoForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/new" + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } ], - publishForAuthenticatedUser: [ - "POST /user/codespaces/{codespace_name}/publish" + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } ], - removeRepositoryForSecretForAuthenticatedUser: [ - "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } ], - repoMachinesForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/machines" + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } ], - setRepositoriesForSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}/repositories" + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } ], - startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], - stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: [ - "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" ], - updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + 
updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] }, - copilot: { - addCopilotForBusinessSeatsForTeams: [ - "POST /orgs/{org}/copilot/billing/selected_teams" + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" ], - addCopilotForBusinessSeatsForUsers: [ - "POST /orgs/{org}/copilot/billing/selected_users" + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" ], - cancelCopilotSeatAssignmentForTeams: [ - "DELETE /orgs/{org}/copilot/billing/selected_teams" + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" ], - cancelCopilotSeatAssignmentForUsers: [ - "DELETE /orgs/{org}/copilot/billing/selected_users" + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" ], - getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], - getCopilotSeatDetailsForUser: [ - "GET /orgs/{org}/members/{username}/copilot" + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" ], - listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] }, - dependabot: { - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT 
/orgs/{org}/teams/{team_slug}/memberships/{username}" ], - createOrUpdateOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" ], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ], - deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" ], - getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], - getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: [ - "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ], - getRepoSecret: [ - "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" ], - listAlertsForEnterprise: [ - "GET /enterprises/{enterprise}/dependabot/alerts" + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" ], - listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], - listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ], - 
setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" - ] + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] }, - dependencyGraph: { - createRepositorySnapshot: [ - "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } ], - diffRange: [ - "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } ], - exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] - }, - emojis: { get: ["GET /emojis"] }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: [ - "GET /user/interaction-limits", + 
createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", {}, - { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } ], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: [ - "DELETE /repos/{owner}/{repo}/interaction-limits" + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } ], - removeRestrictionsForYourPublicRepos: [ - "DELETE /user/interaction-limits", + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", {}, - { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } ], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: [ - "PUT /user/interaction-limits", + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", {}, - { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } - ] - }, - issues: { - addAssignees: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } ], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - checkUserCanBeAssignedToIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" ], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } ], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } ], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: [ - "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" ], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: 
["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } ], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: [ - "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } ], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } ], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } ], - removeAssignees: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } ], - removeLabel: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } ], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: [ - "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" - ] + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + 
listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; +var endpoints_default = Endpoints; + +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; }, - markdown: { - render: ["POST /markdown"], - renderRaw: [ - "POST /markdown/raw", - { headers: { "content-type": "text/plain; charset=utf-8" } } - ] + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); + } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to 
octokit.${newScope}.${newMethodName}()` + ); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } + } + return requestWithDefaults(options2); + } + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} + +// pkg/dist-src/index.js +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 10537: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(58932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
+ ) + ); + return headers || {}; + } + }); + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 36234: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(59440); +var import_universal_user_agent = __nccwpck_require__(45030); + +// pkg/dist-src/version.js +var VERSION = "8.4.0"; + +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(30013); + +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + var _a, _b, _c, _d; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect, + headers: requestOptions.headers, + signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
+ ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + let suffix; + if ("documentation_url" in data) { + suffix = ` - ${data.documentation_url}`; + } else { + suffix = ""; + } + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; + } + return `${data.message}${suffix}`; + } + return `Unknown error: ${JSON.stringify(data)}`; +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; 
+ return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); +} + +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } +}); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 30013: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(58932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." 
+ ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." + ) + ); + return headers || {}; + } + }); + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 57171: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ContextAPI = void 0; +const NoopContextManager_1 = __nccwpck_require__(54118); +const global_utils_1 = __nccwpck_require__(63979); +const diag_1 = __nccwpck_require__(11877); +const API_NAME = 'context'; +const NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +class ContextAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { } + /** Get the singleton instance of the Context API */ + static getInstance() { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + } + /** + * Set the current context manager. + * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager) { + return (0, global_utils_1.registerGlobal)(API_NAME, contextManager, diag_1.DiagAPI.instance()); + } + /** + * Get the currently active context + */ + active() { + return this._getContextManager().active(); + } + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with(context, fn, thisArg, ...args) { + return this._getContextManager().with(context, fn, thisArg, ...args); + } + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. 
Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind(context, target) { + return this._getContextManager().bind(context, target); + } + _getContextManager() { + return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_CONTEXT_MANAGER; + } + /** Disable and remove the global context manager */ + disable() { + this._getContextManager().disable(); + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + } +} +exports.ContextAPI = ContextAPI; +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 11877: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DiagAPI = void 0; +const ComponentLogger_1 = __nccwpck_require__(17978); +const logLevelLogger_1 = __nccwpck_require__(99639); +const types_1 = __nccwpck_require__(78077); +const global_utils_1 = __nccwpck_require__(63979); +const API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +class DiagAPI { + /** + * Private internal constructor + * @private + */ + constructor() { + function _logProxy(funcName) { + return function (...args) { + const logger = (0, global_utils_1.getGlobal)('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName](...args); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + const self = this; + // DiagAPI specific functions + const setLogger = (logger, optionsOrLogLevel = { logLevel: types_1.DiagLogLevel.INFO }) => { + var _a, _b, _c; + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + const err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + if (typeof optionsOrLogLevel === 'number') { + optionsOrLogLevel = { + logLevel: optionsOrLogLevel, + }; + } + const oldLogger = (0, global_utils_1.getGlobal)('diag'); + const newLogger = (0, logLevelLogger_1.createLogLevelDiagLogger)((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : types_1.DiagLogLevel.INFO, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { + const stack = (_c = new Error().stack) !== null && _c !== void 0 ? 
_c : ''; + oldLogger.warn(`Current logger will be overwritten from ${stack}`); + newLogger.warn(`Current logger will overwrite one already registered from ${stack}`); + } + return (0, global_utils_1.registerGlobal)('diag', newLogger, self, true); + }; + self.setLogger = setLogger; + self.disable = () => { + (0, global_utils_1.unregisterGlobal)(API_NAME, self); + }; + self.createComponentLogger = (options) => { + return new ComponentLogger_1.DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + static instance() { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + } +} +exports.DiagAPI = DiagAPI; +//# sourceMappingURL=diag.js.map + +/***/ }), + +/***/ 17696: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MetricsAPI = void 0; +const NoopMeterProvider_1 = __nccwpck_require__(72647); +const global_utils_1 = __nccwpck_require__(63979); +const diag_1 = __nccwpck_require__(11877); +const API_NAME = 'metrics'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Metrics API + */ +class MetricsAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { } + /** Get the singleton instance of the Metrics API */ + static getInstance() { + if (!this._instance) { + this._instance = new MetricsAPI(); + } + return this._instance; + } + /** + * Set the current global meter provider. + * Returns true if the meter provider was successfully registered, else false. + */ + setGlobalMeterProvider(provider) { + return (0, global_utils_1.registerGlobal)(API_NAME, provider, diag_1.DiagAPI.instance()); + } + /** + * Returns the global meter provider. + */ + getMeterProvider() { + return (0, global_utils_1.getGlobal)(API_NAME) || NoopMeterProvider_1.NOOP_METER_PROVIDER; + } + /** + * Returns a meter from the global meter provider. + */ + getMeter(name, version, options) { + return this.getMeterProvider().getMeter(name, version, options); + } + /** Remove the global meter provider */ + disable() { + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + } +} +exports.MetricsAPI = MetricsAPI; +//# sourceMappingURL=metrics.js.map + +/***/ }), + +/***/ 89909: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PropagationAPI = void 0; +const global_utils_1 = __nccwpck_require__(63979); +const NoopTextMapPropagator_1 = __nccwpck_require__(72368); +const TextMapPropagator_1 = __nccwpck_require__(80865); +const context_helpers_1 = __nccwpck_require__(37682); +const utils_1 = __nccwpck_require__(28136); +const diag_1 = __nccwpck_require__(11877); +const API_NAME = 'propagation'; +const NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +class PropagationAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { + this.createBaggage = utils_1.createBaggage; + this.getBaggage = context_helpers_1.getBaggage; + this.getActiveBaggage = context_helpers_1.getActiveBaggage; + this.setBaggage = context_helpers_1.setBaggage; + this.deleteBaggage = context_helpers_1.deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + static getInstance() { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + } + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator) { + return (0, global_utils_1.registerGlobal)(API_NAME, propagator, diag_1.DiagAPI.instance()); + } + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context, carrier, setter = TextMapPropagator_1.defaultTextMapSetter) { + return this._getGlobalPropagator().inject(context, carrier, setter); + } + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context, carrier, getter = TextMapPropagator_1.defaultTextMapGetter) { + return this._getGlobalPropagator().extract(context, carrier, getter); + } + /** + * Return a list of all fields which may be used by the propagator. + */ + fields() { + return this._getGlobalPropagator().fields(); + } + /** Remove the global propagator */ + disable() { + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + } + _getGlobalPropagator() { + return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + } +} +exports.PropagationAPI = PropagationAPI; +//# sourceMappingURL=propagation.js.map + +/***/ }), + +/***/ 81539: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TraceAPI = void 0; +const global_utils_1 = __nccwpck_require__(63979); +const ProxyTracerProvider_1 = __nccwpck_require__(2285); +const spancontext_utils_1 = __nccwpck_require__(49745); +const context_utils_1 = __nccwpck_require__(23326); +const diag_1 = __nccwpck_require__(11877); +const API_NAME = 'trace'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +class TraceAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; + this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; + this.deleteSpan = context_utils_1.deleteSpan; + this.getSpan = context_utils_1.getSpan; + this.getActiveSpan = context_utils_1.getActiveSpan; + this.getSpanContext = context_utils_1.getSpanContext; + this.setSpan = context_utils_1.setSpan; + this.setSpanContext = context_utils_1.setSpanContext; + } + /** Get the singleton instance of the Trace API */ + static getInstance() { + if (!this._instance) { + this._instance = new TraceAPI(); + } + return this._instance; + } + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider) { + const success = (0, global_utils_1.registerGlobal)(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); + if (success) { + this._proxyTracerProvider.setDelegate(provider); + } + return success; + } + /** + * Returns the global tracer provider. + */ + getTracerProvider() { + return (0, global_utils_1.getGlobal)(API_NAME) || this._proxyTracerProvider; + } + /** + * Returns a tracer from the global tracer provider. + */ + getTracer(name, version) { + return this.getTracerProvider().getTracer(name, version); + } + /** Remove the global tracer provider */ + disable() { + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + } +} +exports.TraceAPI = TraceAPI; +//# sourceMappingURL=trace.js.map + +/***/ }), + +/***/ 37682: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deleteBaggage = exports.setBaggage = exports.getActiveBaggage = exports.getBaggage = void 0; +const context_1 = __nccwpck_require__(57171); +const context_2 = __nccwpck_require__(78242); +/** + * Baggage key + */ +const BAGGAGE_KEY = (0, context_2.createContextKey)('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +exports.getBaggage = getBaggage; +/** + * Retrieve the current baggage from the active/current context + * + * @returns {Baggage} Extracted baggage from the context + */ +function getActiveBaggage() { + return getBaggage(context_1.ContextAPI.getInstance().active()); +} +exports.getActiveBaggage = getActiveBaggage; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +exports.setBaggage = setBaggage; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +exports.deleteBaggage = deleteBaggage; +//# sourceMappingURL=context-helpers.js.map + +/***/ }), + +/***/ 84811: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaggageImpl = void 0; +class BaggageImpl { + constructor(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + getEntry(key) { + const entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + } + getAllEntries() { + return Array.from(this._entries.entries()).map(([k, v]) => [k, v]); + } + setEntry(key, entry) { + const newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + } + removeEntry(key) { + const newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + } + removeEntries(...keys) { + const newBaggage = new BaggageImpl(this._entries); + for (const key of keys) { + newBaggage._entries.delete(key); + } + return newBaggage; + } + clear() { + return new BaggageImpl(); + } +} +exports.BaggageImpl = BaggageImpl; +//# sourceMappingURL=baggage-impl.js.map + +/***/ }), + +/***/ 23542: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.baggageEntryMetadataSymbol = void 0; +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map + +/***/ }), + +/***/ 28136: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; +const diag_1 = __nccwpck_require__(11877); +const baggage_impl_1 = __nccwpck_require__(84811); +const symbol_1 = __nccwpck_require__(23542); +const diag = diag_1.DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +function createBaggage(entries = {}) { + return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); +} +exports.createBaggage = createBaggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. 
+ * + */ +function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error(`Cannot create baggage metadata from unknown type: ${typeof str}`); + str = ''; + } + return { + __TYPE__: symbol_1.baggageEntryMetadataSymbol, + toString() { + return str; + }, + }; +} +exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 7393: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.context = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const context_1 = __nccwpck_require__(57171); +/** Entrypoint for context API */ +exports.context = context_1.ContextAPI.getInstance(); +//# sourceMappingURL=context-api.js.map + +/***/ }), + +/***/ 54118: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoopContextManager = void 0; +const context_1 = __nccwpck_require__(78242); +class NoopContextManager { + active() { + return context_1.ROOT_CONTEXT; + } + with(_context, fn, thisArg, ...args) { + return fn.call(thisArg, ...args); + } + bind(_context, target) { + return target; + } + enable() { + return this; + } + disable() { + return this; + } +} +exports.NoopContextManager = NoopContextManager; +//# sourceMappingURL=NoopContextManager.js.map + +/***/ }), + +/***/ 78242: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ROOT_CONTEXT = exports.createContextKey = void 0; +/** Get a key to uniquely identify a context value */ +function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +exports.createContextKey = createContextKey; +class BaseContext { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + constructor(parentContext) { + // for minification + const self = this; + self._currentContext = parentContext ? new Map(parentContext) : new Map(); + self.getValue = (key) => self._currentContext.get(key); + self.setValue = (key, value) => { + const context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = (key) => { + const context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } +} +/** The root context is used as the default parent context when there is no active context */ +exports.ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 39721: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.diag = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const diag_1 = __nccwpck_require__(11877); +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +exports.diag = diag_1.DiagAPI.instance(); +//# sourceMappingURL=diag-api.js.map + +/***/ }), + +/***/ 17978: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DiagComponentLogger = void 0; +const global_utils_1 = __nccwpck_require__(63979); +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. + * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +class DiagComponentLogger { + constructor(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + debug(...args) { + return logProxy('debug', this._namespace, args); + } + error(...args) { + return logProxy('error', this._namespace, args); + } + info(...args) { + return logProxy('info', this._namespace, args); + } + warn(...args) { + return logProxy('warn', this._namespace, args); + } + verbose(...args) { + return logProxy('verbose', this._namespace, args); + } +} +exports.DiagComponentLogger = DiagComponentLogger; +function logProxy(funcName, namespace, args) { + const logger = (0, global_utils_1.getGlobal)('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName](...args); +} +//# sourceMappingURL=ComponentLogger.js.map + +/***/ }), + +/***/ 3041: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DiagConsoleLogger = void 0; +const consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
+ * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +class DiagConsoleLogger { + constructor() { + function _consoleFunc(funcName) { + return function (...args) { + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + let theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (let i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } +} +exports.DiagConsoleLogger = DiagConsoleLogger; +//# sourceMappingURL=consoleLogger.js.map + +/***/ }), + +/***/ 99639: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createLogLevelDiagLogger = void 0; +const types_1 = __nccwpck_require__(78077); +function createLogLevelDiagLogger(maxLevel, logger) { + if (maxLevel < types_1.DiagLogLevel.NONE) { + maxLevel = types_1.DiagLogLevel.NONE; + } + else if (maxLevel > types_1.DiagLogLevel.ALL) { + maxLevel = types_1.DiagLogLevel.ALL; + } + // In case the logger is null or undefined + logger = logger || {}; + function _filterFunc(funcName, theLevel) { + const theFunc = logger[funcName]; + if (typeof theFunc === 'function' && maxLevel >= theLevel) { + return theFunc.bind(logger); + } + return function () { }; + } + return { + error: _filterFunc('error', types_1.DiagLogLevel.ERROR), + warn: _filterFunc('warn', types_1.DiagLogLevel.WARN), + info: _filterFunc('info', types_1.DiagLogLevel.INFO), + debug: _filterFunc('debug', types_1.DiagLogLevel.DEBUG), + verbose: _filterFunc('verbose', types_1.DiagLogLevel.VERBOSE), + }; +} +exports.createLogLevelDiagLogger = createLogLevelDiagLogger; +//# sourceMappingURL=logLevelLogger.js.map + +/***/ }), + +/***/ 78077: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DiagLogLevel = void 0; +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {})); +//# sourceMappingURL=types.js.map + +/***/ }), + +/***/ 65163: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.trace = exports.propagation = exports.metrics = exports.diag = exports.context = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.createTraceState = exports.TraceFlags = exports.SpanStatusCode = exports.SpanKind = exports.SamplingDecision = exports.ProxyTracerProvider = exports.ProxyTracer = exports.defaultTextMapSetter = exports.defaultTextMapGetter = exports.ValueType = exports.createNoopMeter = exports.DiagLogLevel = exports.DiagConsoleLogger = exports.ROOT_CONTEXT = exports.createContextKey = exports.baggageEntryMetadataFromString = void 0; +var utils_1 = __nccwpck_require__(28136); +Object.defineProperty(exports, "baggageEntryMetadataFromString", ({ enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } })); +// Context APIs +var context_1 = __nccwpck_require__(78242); +Object.defineProperty(exports, "createContextKey", ({ enumerable: true, get: function () { return context_1.createContextKey; } })); +Object.defineProperty(exports, "ROOT_CONTEXT", ({ enumerable: true, get: function () { return context_1.ROOT_CONTEXT; } })); +// Diag APIs +var consoleLogger_1 = __nccwpck_require__(3041); +Object.defineProperty(exports, "DiagConsoleLogger", ({ enumerable: true, get: function () { return consoleLogger_1.DiagConsoleLogger; } })); +var types_1 = __nccwpck_require__(78077); +Object.defineProperty(exports, "DiagLogLevel", ({ enumerable: true, get: function () { return types_1.DiagLogLevel; } })); +// Metrics APIs +var NoopMeter_1 = __nccwpck_require__(4837); +Object.defineProperty(exports, "createNoopMeter", ({ enumerable: true, get: function () { return NoopMeter_1.createNoopMeter; } })); +var Metric_1 = __nccwpck_require__(89999); +Object.defineProperty(exports, "ValueType", ({ enumerable: true, get: function () { return Metric_1.ValueType; } })); +// Propagation APIs +var TextMapPropagator_1 = __nccwpck_require__(80865); +Object.defineProperty(exports, "defaultTextMapGetter", ({ enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapGetter; } })); +Object.defineProperty(exports, "defaultTextMapSetter", ({ enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapSetter; } })); +var ProxyTracer_1 = __nccwpck_require__(43503); +Object.defineProperty(exports, "ProxyTracer", ({ enumerable: true, get: function () { return ProxyTracer_1.ProxyTracer; } })); +var ProxyTracerProvider_1 = __nccwpck_require__(2285); +Object.defineProperty(exports, "ProxyTracerProvider", ({ enumerable: true, get: function () { return ProxyTracerProvider_1.ProxyTracerProvider; } })); +var SamplingResult_1 = __nccwpck_require__(33209); +Object.defineProperty(exports, "SamplingDecision", ({ enumerable: true, get: function () { return SamplingResult_1.SamplingDecision; } })); +var span_kind_1 = __nccwpck_require__(31424); +Object.defineProperty(exports, "SpanKind", ({ enumerable: true, get: function () { return span_kind_1.SpanKind; } })); +var status_1 = __nccwpck_require__(48845); +Object.defineProperty(exports, "SpanStatusCode", ({ enumerable: true, get: function () { return status_1.SpanStatusCode; } })); +var trace_flags_1 = __nccwpck_require__(26905); +Object.defineProperty(exports, "TraceFlags", ({ enumerable: true, get: function () { return trace_flags_1.TraceFlags; } })); +var utils_2 = __nccwpck_require__(32615); 
+Object.defineProperty(exports, "createTraceState", ({ enumerable: true, get: function () { return utils_2.createTraceState; } })); +var spancontext_utils_1 = __nccwpck_require__(49745); +Object.defineProperty(exports, "isSpanContextValid", ({ enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } })); +Object.defineProperty(exports, "isValidTraceId", ({ enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } })); +Object.defineProperty(exports, "isValidSpanId", ({ enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } })); +var invalid_span_constants_1 = __nccwpck_require__(91760); +Object.defineProperty(exports, "INVALID_SPANID", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPANID; } })); +Object.defineProperty(exports, "INVALID_TRACEID", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } })); +Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } })); +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const context_api_1 = __nccwpck_require__(7393); +Object.defineProperty(exports, "context", ({ enumerable: true, get: function () { return context_api_1.context; } })); +const diag_api_1 = __nccwpck_require__(39721); +Object.defineProperty(exports, "diag", ({ enumerable: true, get: function () { return diag_api_1.diag; } })); +const metrics_api_1 = __nccwpck_require__(72601); +Object.defineProperty(exports, "metrics", ({ enumerable: true, get: function () { return metrics_api_1.metrics; } })); +const propagation_api_1 = __nccwpck_require__(17591); +Object.defineProperty(exports, "propagation", ({ enumerable: true, get: function () { return propagation_api_1.propagation; } })); +const trace_api_1 = __nccwpck_require__(98989); +Object.defineProperty(exports, "trace", ({ enumerable: true, get: function () { return trace_api_1.trace; } })); +// Default export. +exports["default"] = { + context: context_api_1.context, + diag: diag_api_1.diag, + metrics: metrics_api_1.metrics, + propagation: propagation_api_1.propagation, + trace: trace_api_1.trace, +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 63979: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; +const platform_1 = __nccwpck_require__(99957); +const version_1 = __nccwpck_require__(98996); +const semver_1 = __nccwpck_require__(81522); +const major = version_1.VERSION.split('.')[0]; +const GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(`opentelemetry.js.api.${major}`); +const _global = platform_1._globalThis; +function registerGlobal(type, instance, diag, allowOverride = false) { + var _a; + const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + version: version_1.VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + const err = new Error(`@opentelemetry/api: Attempted duplicate registration of API: ${type}`); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== version_1.VERSION) { + // All registered APIs must be of the same version exactly + const err = new Error(`@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${version_1.VERSION}`); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug(`@opentelemetry/api: Registered a global for ${type} v${version_1.VERSION}.`); + return true; +} +exports.registerGlobal = registerGlobal; +function getGlobal(type) { + var _a, _b; + const globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !(0, semver_1.isCompatible)(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; +} +exports.getGlobal = getGlobal; +function unregisterGlobal(type, diag) { + diag.debug(`@opentelemetry/api: Unregistering a global for ${type} v${version_1.VERSION}.`); + const api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +exports.unregisterGlobal = unregisterGlobal; +//# sourceMappingURL=global-utils.js.map + +/***/ }), + +/***/ 81522: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isCompatible = exports._makeCompatibilityCheck = void 0; +const version_1 = __nccwpck_require__(98996); +const re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +function _makeCompatibilityCheck(ownVersion) { + const acceptedVersions = new Set([ownVersion]); + const rejectedVersions = new Set(); + const myVersionMatch = ownVersion.match(re); + if (!myVersionMatch) { + // we cannot guarantee compatibility so we always return noop + return () => false; + } + const ownVersionParsed = { + major: +myVersionMatch[1], + minor: +myVersionMatch[2], + patch: +myVersionMatch[3], + prerelease: myVersionMatch[4], + }; + // if ownVersion has a prerelease tag, versions must match exactly + if (ownVersionParsed.prerelease != null) { + return function isExactmatch(globalVersion) { + return globalVersion === ownVersion; + }; + } + function _reject(v) { + rejectedVersions.add(v); + return false; + } + function _accept(v) { + acceptedVersions.add(v); + return true; + } + return function isCompatible(globalVersion) { + if (acceptedVersions.has(globalVersion)) { + return true; + } + if (rejectedVersions.has(globalVersion)) { + return false; + } + const globalVersionMatch = globalVersion.match(re); + if (!globalVersionMatch) { + // cannot parse other version + // we cannot guarantee compatibility so we always noop + return _reject(globalVersion); + } + const globalVersionParsed = { + major: +globalVersionMatch[1], + minor: +globalVersionMatch[2], + patch: +globalVersionMatch[3], + prerelease: globalVersionMatch[4], + }; + // if globalVersion has a prerelease tag, versions must match exactly + if (globalVersionParsed.prerelease != null) { + return _reject(globalVersion); + } + // major versions must match + if (ownVersionParsed.major !== globalVersionParsed.major) { + return _reject(globalVersion); + } + if (ownVersionParsed.major === 0) { + if (ownVersionParsed.minor === globalVersionParsed.minor && + ownVersionParsed.patch <= globalVersionParsed.patch) { + return _accept(globalVersion); + } + return _reject(globalVersion); + } + if (ownVersionParsed.minor <= globalVersionParsed.minor) { + return _accept(globalVersion); + } + return _reject(globalVersion); + }; +} +exports._makeCompatibilityCheck = _makeCompatibilityCheck; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION); +//# sourceMappingURL=semver.js.map + +/***/ }), + +/***/ 72601: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.metrics = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const metrics_1 = __nccwpck_require__(17696); +/** Entrypoint for metrics API */ +exports.metrics = metrics_1.MetricsAPI.getInstance(); +//# sourceMappingURL=metrics-api.js.map + +/***/ }), + +/***/ 89999: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ValueType = void 0; +/** The Type of value. It describes how the data is reported. */ +var ValueType; +(function (ValueType) { + ValueType[ValueType["INT"] = 0] = "INT"; + ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE"; +})(ValueType = exports.ValueType || (exports.ValueType = {})); +//# sourceMappingURL=Metric.js.map + +/***/ }), + +/***/ 4837: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createNoopMeter = exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = exports.NOOP_OBSERVABLE_GAUGE_METRIC = exports.NOOP_OBSERVABLE_COUNTER_METRIC = exports.NOOP_UP_DOWN_COUNTER_METRIC = exports.NOOP_HISTOGRAM_METRIC = exports.NOOP_GAUGE_METRIC = exports.NOOP_COUNTER_METRIC = exports.NOOP_METER = exports.NoopObservableUpDownCounterMetric = exports.NoopObservableGaugeMetric = exports.NoopObservableCounterMetric = exports.NoopObservableMetric = exports.NoopHistogramMetric = exports.NoopGaugeMetric = exports.NoopUpDownCounterMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0; +/** + * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses + * constant NoopMetrics for all of its methods. + */ +class NoopMeter { + constructor() { } + /** + * @see {@link Meter.createGauge} + */ + createGauge(_name, _options) { + return exports.NOOP_GAUGE_METRIC; + } + /** + * @see {@link Meter.createHistogram} + */ + createHistogram(_name, _options) { + return exports.NOOP_HISTOGRAM_METRIC; + } + /** + * @see {@link Meter.createCounter} + */ + createCounter(_name, _options) { + return exports.NOOP_COUNTER_METRIC; + } + /** + * @see {@link Meter.createUpDownCounter} + */ + createUpDownCounter(_name, _options) { + return exports.NOOP_UP_DOWN_COUNTER_METRIC; + } + /** + * @see {@link Meter.createObservableGauge} + */ + createObservableGauge(_name, _options) { + return exports.NOOP_OBSERVABLE_GAUGE_METRIC; + } + /** + * @see {@link Meter.createObservableCounter} + */ + createObservableCounter(_name, _options) { + return exports.NOOP_OBSERVABLE_COUNTER_METRIC; + } + /** + * @see {@link Meter.createObservableUpDownCounter} + */ + createObservableUpDownCounter(_name, _options) { + return exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; + } + /** + * @see {@link Meter.addBatchObservableCallback} + */ + addBatchObservableCallback(_callback, _observables) { } + /** + * @see {@link Meter.removeBatchObservableCallback} + */ + removeBatchObservableCallback(_callback) { } +} +exports.NoopMeter = NoopMeter; +class NoopMetric { +} +exports.NoopMetric = NoopMetric; +class NoopCounterMetric extends NoopMetric { + add(_value, _attributes) { } +} +exports.NoopCounterMetric = NoopCounterMetric; +class NoopUpDownCounterMetric extends NoopMetric { + add(_value, _attributes) { } +} +exports.NoopUpDownCounterMetric = NoopUpDownCounterMetric; +class NoopGaugeMetric extends NoopMetric { + record(_value, _attributes) { } +} +exports.NoopGaugeMetric = NoopGaugeMetric; +class NoopHistogramMetric extends NoopMetric { + record(_value, _attributes) { } +} +exports.NoopHistogramMetric = NoopHistogramMetric; +class NoopObservableMetric { + addCallback(_callback) { } + removeCallback(_callback) { } +} +exports.NoopObservableMetric = NoopObservableMetric; +class NoopObservableCounterMetric extends NoopObservableMetric { +} +exports.NoopObservableCounterMetric = NoopObservableCounterMetric; +class NoopObservableGaugeMetric extends NoopObservableMetric { +} +exports.NoopObservableGaugeMetric = 
NoopObservableGaugeMetric; +class NoopObservableUpDownCounterMetric extends NoopObservableMetric { +} +exports.NoopObservableUpDownCounterMetric = NoopObservableUpDownCounterMetric; +exports.NOOP_METER = new NoopMeter(); +// Synchronous instruments +exports.NOOP_COUNTER_METRIC = new NoopCounterMetric(); +exports.NOOP_GAUGE_METRIC = new NoopGaugeMetric(); +exports.NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); +exports.NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); +// Asynchronous instruments +exports.NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); +exports.NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); +exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); +/** + * Create a no-op Meter + */ +function createNoopMeter() { + return exports.NOOP_METER; +} +exports.createNoopMeter = createNoopMeter; +//# sourceMappingURL=NoopMeter.js.map + +/***/ }), + +/***/ 72647: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NOOP_METER_PROVIDER = exports.NoopMeterProvider = void 0; +const NoopMeter_1 = __nccwpck_require__(4837); +/** + * An implementation of the {@link MeterProvider} which returns an impotent Meter + * for all calls to `getMeter` + */ +class NoopMeterProvider { + getMeter(_name, _version, _options) { + return NoopMeter_1.NOOP_METER; + } +} +exports.NoopMeterProvider = NoopMeterProvider; +exports.NOOP_METER_PROVIDER = new NoopMeterProvider(); +//# sourceMappingURL=NoopMeterProvider.js.map + +/***/ }), + +/***/ 99957: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__nccwpck_require__(87200), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 89406: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports._globalThis = void 0; +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +exports._globalThis = typeof globalThis === 'object' ? globalThis : global; +//# sourceMappingURL=globalThis.js.map + +/***/ }), + +/***/ 87200: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__nccwpck_require__(89406), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 17591: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.propagation = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const propagation_1 = __nccwpck_require__(89909); +/** Entrypoint for propagation API */ +exports.propagation = propagation_1.PropagationAPI.getInstance(); +//# sourceMappingURL=propagation-api.js.map + +/***/ }), + +/***/ 72368: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoopTextMapPropagator = void 0; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +class NoopTextMapPropagator { + /** Noop inject function does nothing */ + inject(_context, _carrier) { } + /** Noop extract function does nothing and returns the input context */ + extract(context, _carrier) { + return context; + } + fields() { + return []; + } +} +exports.NoopTextMapPropagator = NoopTextMapPropagator; +//# sourceMappingURL=NoopTextMapPropagator.js.map + +/***/ }), + +/***/ 80865: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultTextMapSetter = exports.defaultTextMapGetter = void 0; +exports.defaultTextMapGetter = { + get(carrier, key) { + if (carrier == null) { + return undefined; + } + return carrier[key]; + }, + keys(carrier) { + if (carrier == null) { + return []; + } + return Object.keys(carrier); + }, +}; +exports.defaultTextMapSetter = { + set(carrier, key, value) { + if (carrier == null) { + return; + } + carrier[key] = value; + }, +}; +//# sourceMappingURL=TextMapPropagator.js.map + +/***/ }), + +/***/ 98989: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.trace = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const trace_1 = __nccwpck_require__(81539); +/** Entrypoint for trace API */ +exports.trace = trace_1.TraceAPI.getInstance(); +//# sourceMappingURL=trace-api.js.map + +/***/ }), + +/***/ 81462: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NonRecordingSpan = void 0; +const invalid_span_constants_1 = __nccwpck_require__(91760); +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +class NonRecordingSpan { + constructor(_spanContext = invalid_span_constants_1.INVALID_SPAN_CONTEXT) { + this._spanContext = _spanContext; + } + // Returns a SpanContext. + spanContext() { + return this._spanContext; + } + // By default does nothing + setAttribute(_key, _value) { + return this; + } + // By default does nothing + setAttributes(_attributes) { + return this; + } + // By default does nothing + addEvent(_name, _attributes) { + return this; + } + addLink(_link) { + return this; + } + addLinks(_links) { + return this; + } + // By default does nothing + setStatus(_status) { + return this; + } + // By default does nothing + updateName(_name) { + return this; + } + // By default does nothing + end(_endTime) { } + // isRecording always returns false for NonRecordingSpan. 
+ isRecording() { + return false; + } + // By default does nothing + recordException(_exception, _time) { } +} +exports.NonRecordingSpan = NonRecordingSpan; +//# sourceMappingURL=NonRecordingSpan.js.map + +/***/ }), + +/***/ 17606: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoopTracer = void 0; +const context_1 = __nccwpck_require__(57171); +const context_utils_1 = __nccwpck_require__(23326); +const NonRecordingSpan_1 = __nccwpck_require__(81462); +const spancontext_utils_1 = __nccwpck_require__(49745); +const contextApi = context_1.ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. + */ +class NoopTracer { + // startSpan starts a noop span. + startSpan(name, options, context = contextApi.active()) { + const root = Boolean(options === null || options === void 0 ? void 0 : options.root); + if (root) { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + const parentFromContext = context && (0, context_utils_1.getSpanContext)(context); + if (isSpanContext(parentFromContext) && + (0, spancontext_utils_1.isSpanContextValid)(parentFromContext)) { + return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + } + startActiveSpan(name, arg2, arg3, arg4) { + let opts; + let ctx; + let fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + const parentContext = ctx !== null && ctx !== void 0 ? ctx : contextApi.active(); + const span = this.startSpan(name, opts, parentContext); + const contextWithSpanSet = (0, context_utils_1.setSpan)(parentContext, span); + return contextApi.with(contextWithSpanSet, fn, undefined, span); + } +} +exports.NoopTracer = NoopTracer; +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map + +/***/ }), + +/***/ 23259: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoopTracerProvider = void 0; +const NoopTracer_1 = __nccwpck_require__(17606); +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. + */ +class NoopTracerProvider { + getTracer(_name, _version, _options) { + return new NoopTracer_1.NoopTracer(); + } +} +exports.NoopTracerProvider = NoopTracerProvider; +//# sourceMappingURL=NoopTracerProvider.js.map + +/***/ }), + +/***/ 43503: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ProxyTracer = void 0; +const NoopTracer_1 = __nccwpck_require__(17606); +const NOOP_TRACER = new NoopTracer_1.NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +class ProxyTracer { + constructor(_provider, name, version, options) { + this._provider = _provider; + this.name = name; + this.version = version; + this.options = options; + } + startSpan(name, options, context) { + return this._getTracer().startSpan(name, options, context); + } + startActiveSpan(_name, _options, _context, _fn) { + const tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + } + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. + */ + _getTracer() { + if (this._delegate) { + return this._delegate; + } + const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + } +} +exports.ProxyTracer = ProxyTracer; +//# sourceMappingURL=ProxyTracer.js.map + +/***/ }), + +/***/ 2285: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ProxyTracerProvider = void 0; +const ProxyTracer_1 = __nccwpck_require__(43503); +const NoopTracerProvider_1 = __nccwpck_require__(23259); +const NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +class ProxyTracerProvider { + /** + * Get a {@link ProxyTracer} + */ + getTracer(name, version, options) { + var _a; + return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version, options)); + } + getDelegate() { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + } + /** + * Set the delegate tracer provider + */ + setDelegate(delegate) { + this._delegate = delegate; + } + getDelegateTracer(name, version, options) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); + } +} +exports.ProxyTracerProvider = ProxyTracerProvider; +//# sourceMappingURL=ProxyTracerProvider.js.map + +/***/ }), + +/***/ 33209: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SamplingDecision = void 0; +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. + */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map + +/***/ }), + +/***/ 23326: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getActiveSpan = exports.getSpan = void 0; +const context_1 = __nccwpck_require__(78242); +const NonRecordingSpan_1 = __nccwpck_require__(81462); +const context_2 = __nccwpck_require__(57171); +/** + * span key + */ +const SPAN_KEY = (0, context_1.createContextKey)('OpenTelemetry Context Key SPAN'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +exports.getSpan = getSpan; +/** + * Gets the span from the current context, if one exists. + */ +function getActiveSpan() { + return getSpan(context_2.ContextAPI.getInstance().active()); +} +exports.getActiveSpan = getActiveSpan; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +exports.setSpan = setSpan; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +function deleteSpan(context) { + return context.deleteValue(SPAN_KEY); +} +exports.deleteSpan = deleteSpan; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext)); +} +exports.setSpanContext = setSpanContext; +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.spanContext(); +} +exports.getSpanContext = getSpanContext; +//# sourceMappingURL=context-utils.js.map + +/***/ }), + +/***/ 62110: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TraceStateImpl = void 0; +const tracestate_validators_1 = __nccwpck_require__(54864); +const MAX_TRACE_STATE_ITEMS = 32; +const MAX_TRACE_STATE_LEN = 512; +const LIST_MEMBERS_SEPARATOR = ','; +const LIST_MEMBER_KEY_VALUE_SPLITTER = '='; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +class TraceStateImpl { + constructor(rawTraceState) { + this._internalState = new Map(); + if (rawTraceState) + this._parse(rawTraceState); + } + set(key, value) { + // TODO: Benchmark the different approaches(map vs list) and + // use the faster one. + const traceState = this._clone(); + if (traceState._internalState.has(key)) { + traceState._internalState.delete(key); + } + traceState._internalState.set(key, value); + return traceState; + } + unset(key) { + const traceState = this._clone(); + traceState._internalState.delete(key); + return traceState; + } + get(key) { + return this._internalState.get(key); + } + serialize() { + return this._keys() + .reduce((agg, key) => { + agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key)); + return agg; + }, []) + .join(LIST_MEMBERS_SEPARATOR); + } + _parse(rawTraceState) { + if (rawTraceState.length > MAX_TRACE_STATE_LEN) + return; + this._internalState = rawTraceState + .split(LIST_MEMBERS_SEPARATOR) + .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning + .reduce((agg, part) => { + const listMember = part.trim(); // Optional Whitespace (OWS) handling + const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); + if (i !== -1) { + const key = listMember.slice(0, i); + const value = listMember.slice(i + 1, part.length); + if ((0, tracestate_validators_1.validateKey)(key) && (0, tracestate_validators_1.validateValue)(value)) { + agg.set(key, value); + } + else { + // TODO: Consider to add warning log + } + } + return agg; + }, new Map()); + // Because of the reverse() requirement, trunc must be done after map is created + if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { + this._internalState = new Map(Array.from(this._internalState.entries()) + .reverse() // Use reverse same as original tracestate parse chain + .slice(0, MAX_TRACE_STATE_ITEMS)); + } + } + _keys() { + return Array.from(this._internalState.keys()).reverse(); + } + _clone() { + const traceState = new TraceStateImpl(); + traceState._internalState = new Map(this._internalState); + return traceState; + } +} +exports.TraceStateImpl = TraceStateImpl; +//# sourceMappingURL=tracestate-impl.js.map + +/***/ }), + +/***/ 54864: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateValue = exports.validateKey = void 0; +const VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; +const VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`; +const VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`; +const VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`); +const VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; +const INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. + * see https://www.w3.org/TR/trace-context/#key + */ +function validateKey(key) { + return VALID_KEY_REGEX.test(key); +} +exports.validateKey = validateKey; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. + */ +function validateValue(value) { + return (VALID_VALUE_BASE_REGEX.test(value) && + !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); +} +exports.validateValue = validateValue; +//# sourceMappingURL=tracestate-validators.js.map + +/***/ }), + +/***/ 32615: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createTraceState = void 0; +const tracestate_impl_1 = __nccwpck_require__(62110); +function createTraceState(rawTraceState) { + return new tracestate_impl_1.TraceStateImpl(rawTraceState); +} +exports.createTraceState = createTraceState; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 91760: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; +const trace_flags_1 = __nccwpck_require__(26905); +exports.INVALID_SPANID = '0000000000000000'; +exports.INVALID_TRACEID = '00000000000000000000000000000000'; +exports.INVALID_SPAN_CONTEXT = { + traceId: exports.INVALID_TRACEID, + spanId: exports.INVALID_SPANID, + traceFlags: trace_flags_1.TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map + +/***/ }), + +/***/ 31424: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SpanKind = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind = exports.SpanKind || (exports.SpanKind = {})); +//# sourceMappingURL=span_kind.js.map + +/***/ }), + +/***/ 49745: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +const invalid_span_constants_1 = __nccwpck_require__(91760); +const NonRecordingSpan_1 = __nccwpck_require__(81462); +const VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +const VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== invalid_span_constants_1.INVALID_TRACEID; +} +exports.isValidTraceId = isValidTraceId; +function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== invalid_span_constants_1.INVALID_SPANID; +} +exports.isValidSpanId = isValidSpanId; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. + */ +function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +exports.isSpanContextValid = isSpanContextValid; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +function wrapSpanContext(spanContext) { + return new NonRecordingSpan_1.NonRecordingSpan(spanContext); +} +exports.wrapSpanContext = wrapSpanContext; +//# sourceMappingURL=spancontext-utils.js.map + +/***/ }), + +/***/ 48845: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SpanStatusCode = void 0; +/** + * An enumeration of status codes. + */ +var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode = exports.SpanStatusCode || (exports.SpanStatusCode = {})); +//# sourceMappingURL=status.js.map + +/***/ }), + +/***/ 26905: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TraceFlags = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var TraceFlags; +(function (TraceFlags) { + /** Represents no flag set. */ + TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; + /** Bit to represent whether trace is sampled in trace flags. */ + TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; +})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {})); +//# sourceMappingURL=trace_flags.js.map + +/***/ }), + +/***/ 98996: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.VERSION = void 0; +// this is autogenerated file, see scripts/version-update.js +exports.VERSION = '1.9.0'; +//# sourceMappingURL=version.js.map + +/***/ }), + +/***/ 252: +/***/ ((module) => { + +"use strict"; + +module.exports = asPromise; + +/** + * Callback as used by {@link util.asPromise}. + * @typedef asPromiseCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {...*} params Additional arguments + * @returns {undefined} + */ + +/** + * Returns a promise from a node-style callback function. + * @memberof util + * @param {asPromiseCallback} fn Function to call + * @param {*} ctx Function context + * @param {...*} params Function arguments + * @returns {Promise<*>} Promisified function + */ +function asPromise(fn, ctx/*, varargs */) { + var params = new Array(arguments.length - 1), + offset = 0, + index = 2, + pending = true; + while (index < arguments.length) + params[offset++] = arguments[index++]; + return new Promise(function executor(resolve, reject) { + params[offset] = function callback(err/*, varargs */) { + if (pending) { + pending = false; + if (err) + reject(err); + else { + var params = new Array(arguments.length - 1), + offset = 0; + while (offset < params.length) + params[offset++] = arguments[offset]; + resolve.apply(null, params); + } + } + }; + try { + fn.apply(ctx || null, params); + } catch (err) { + if (pending) { + pending = false; + reject(err); + } + } + }); +} + + +/***/ }), + +/***/ 26718: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * A minimal base64 implementation for number arrays. + * @memberof util + * @namespace + */ +var base64 = exports; + +/** + * Calculates the byte length of a base64 encoded string. + * @param {string} string Base64 encoded string + * @returns {number} Byte length + */ +base64.length = function length(string) { + var p = string.length; + if (!p) + return 0; + var n = 0; + while (--p % 4 > 1 && string.charAt(p) === "=") + ++n; + return Math.ceil(string.length * 3) / 4 - n; +}; + +// Base64 encoding table +var b64 = new Array(64); + +// Base64 decoding table +var s64 = new Array(123); + +// 65..90, 97..122, 48..57, 43, 47 +for (var i = 0; i < 64;) + s64[b64[i] = i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? i - 4 : i - 59 | 43] = i++; + +/** + * Encodes a buffer to a base64 encoded string. 
+ * @param {Uint8Array} buffer Source buffer + * @param {number} start Source start + * @param {number} end Source end + * @returns {string} Base64 encoded string + */ +base64.encode = function encode(buffer, start, end) { + var parts = null, + chunk = []; + var i = 0, // output index + j = 0, // goto index + t; // temporary + while (start < end) { + var b = buffer[start++]; + switch (j) { + case 0: + chunk[i++] = b64[b >> 2]; + t = (b & 3) << 4; + j = 1; + break; + case 1: + chunk[i++] = b64[t | b >> 4]; + t = (b & 15) << 2; + j = 2; + break; + case 2: + chunk[i++] = b64[t | b >> 6]; + chunk[i++] = b64[b & 63]; + j = 0; + break; + } + if (i > 8191) { + (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk)); + i = 0; + } + } + if (j) { + chunk[i++] = b64[t]; + chunk[i++] = 61; + if (j === 1) + chunk[i++] = 61; + } + if (parts) { + if (i) + parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))); + return parts.join(""); + } + return String.fromCharCode.apply(String, chunk.slice(0, i)); +}; + +var invalidEncoding = "invalid encoding"; + +/** + * Decodes a base64 encoded string to a buffer. + * @param {string} string Source string + * @param {Uint8Array} buffer Destination buffer + * @param {number} offset Destination offset + * @returns {number} Number of bytes written + * @throws {Error} If encoding is invalid + */ +base64.decode = function decode(string, buffer, offset) { + var start = offset; + var j = 0, // goto index + t; // temporary + for (var i = 0; i < string.length;) { + var c = string.charCodeAt(i++); + if (c === 61 && j > 1) + break; + if ((c = s64[c]) === undefined) + throw Error(invalidEncoding); + switch (j) { + case 0: + t = c; + j = 1; + break; + case 1: + buffer[offset++] = t << 2 | (c & 48) >> 4; + t = c; + j = 2; + break; + case 2: + buffer[offset++] = (t & 15) << 4 | (c & 60) >> 2; + t = c; + j = 3; + break; + case 3: + buffer[offset++] = (t & 3) << 6 | c; + j = 0; + break; + } + } + if (j === 1) + throw Error(invalidEncoding); + return offset - start; +}; + +/** + * Tests if the specified string appears to be base64 encoded. + * @param {string} string String to test + * @returns {boolean} `true` if probably base64 encoded, otherwise false + */ +base64.test = function test(string) { + return /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(string); +}; + + +/***/ }), + +/***/ 58882: +/***/ ((module) => { + +"use strict"; + +module.exports = codegen; + +/** + * Begins generating a function. + * @memberof util + * @param {string[]} functionParams Function parameter names + * @param {string} [functionName] Function name if not anonymous + * @returns {Codegen} Appender that appends code to the function's body + */ +function codegen(functionParams, functionName) { + + /* istanbul ignore if */ + if (typeof functionParams === "string") { + functionName = functionParams; + functionParams = undefined; + } + + var body = []; + + /** + * Appends code to the function's body or finishes generation. 
+ * @typedef Codegen + * @type {function} + * @param {string|Object.} [formatStringOrScope] Format string or, to finish the function, an object of additional scope variables, if any + * @param {...*} [formatParams] Format parameters + * @returns {Codegen|Function} Itself or the generated function if finished + * @throws {Error} If format parameter counts do not match + */ + + function Codegen(formatStringOrScope) { + // note that explicit array handling below makes this ~50% faster + + // finish the function + if (typeof formatStringOrScope !== "string") { + var source = toString(); + if (codegen.verbose) + console.log("codegen: " + source); // eslint-disable-line no-console + source = "return " + source; + if (formatStringOrScope) { + var scopeKeys = Object.keys(formatStringOrScope), + scopeParams = new Array(scopeKeys.length + 1), + scopeValues = new Array(scopeKeys.length), + scopeOffset = 0; + while (scopeOffset < scopeKeys.length) { + scopeParams[scopeOffset] = scopeKeys[scopeOffset]; + scopeValues[scopeOffset] = formatStringOrScope[scopeKeys[scopeOffset++]]; + } + scopeParams[scopeOffset] = source; + return Function.apply(null, scopeParams).apply(null, scopeValues); // eslint-disable-line no-new-func + } + return Function(source)(); // eslint-disable-line no-new-func + } + + // otherwise append to body + var formatParams = new Array(arguments.length - 1), + formatOffset = 0; + while (formatOffset < formatParams.length) + formatParams[formatOffset] = arguments[++formatOffset]; + formatOffset = 0; + formatStringOrScope = formatStringOrScope.replace(/%([%dfijs])/g, function replace($0, $1) { + var value = formatParams[formatOffset++]; + switch ($1) { + case "d": case "f": return String(Number(value)); + case "i": return String(Math.floor(value)); + case "j": return JSON.stringify(value); + case "s": return String(value); + } + return "%"; + }); + if (formatOffset !== formatParams.length) + throw Error("parameter count mismatch"); + body.push(formatStringOrScope); + return Codegen; + } + + function toString(functionNameOverride) { + return "function " + (functionNameOverride || functionName || "") + "(" + (functionParams && functionParams.join(",") || "") + "){\n " + body.join("\n ") + "\n}"; + } + + Codegen.toString = toString; + return Codegen; +} + +/** + * Begins generating a function. + * @memberof util + * @function codegen + * @param {string} [functionName] Function name if not anonymous + * @returns {Codegen} Appender that appends code to the function's body + * @variation 2 + */ + +/** + * When set to `true`, codegen will log generated code to console. Useful for debugging. + * @name util.codegen.verbose + * @type {boolean} + */ +codegen.verbose = false; + + +/***/ }), + +/***/ 86850: +/***/ ((module) => { + +"use strict"; + +module.exports = EventEmitter; + +/** + * Constructs a new event emitter instance. + * @classdesc A minimal event emitter. + * @memberof util + * @constructor + */ +function EventEmitter() { + + /** + * Registered listeners. + * @type {Object.} + * @private + */ + this._listeners = {}; +} + +/** + * Registers an event listener. + * @param {string} evt Event name + * @param {function} fn Listener + * @param {*} [ctx] Listener context + * @returns {util.EventEmitter} `this` + */ +EventEmitter.prototype.on = function on(evt, fn, ctx) { + (this._listeners[evt] || (this._listeners[evt] = [])).push({ + fn : fn, + ctx : ctx || this + }); + return this; +}; + +/** + * Removes an event listener or any matching listeners if arguments are omitted. 
+ * @param {string} [evt] Event name. Removes all listeners if omitted. + * @param {function} [fn] Listener to remove. Removes all listeners of `evt` if omitted. + * @returns {util.EventEmitter} `this` + */ +EventEmitter.prototype.off = function off(evt, fn) { + if (evt === undefined) + this._listeners = {}; + else { + if (fn === undefined) + this._listeners[evt] = []; + else { + var listeners = this._listeners[evt]; + for (var i = 0; i < listeners.length;) + if (listeners[i].fn === fn) + listeners.splice(i, 1); + else + ++i; + } + } + return this; +}; + +/** + * Emits an event by calling its listeners with the specified arguments. + * @param {string} evt Event name + * @param {...*} args Arguments + * @returns {util.EventEmitter} `this` + */ +EventEmitter.prototype.emit = function emit(evt) { + var listeners = this._listeners[evt]; + if (listeners) { + var args = [], + i = 1; + for (; i < arguments.length;) + args.push(arguments[i++]); + for (i = 0; i < listeners.length;) + listeners[i].fn.apply(listeners[i++].ctx, args); + } + return this; +}; + + +/***/ }), + +/***/ 50663: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +module.exports = fetch; + +var asPromise = __nccwpck_require__(252), + inquire = __nccwpck_require__(60094); + +var fs = inquire("fs"); + +/** + * Node-style callback as used by {@link util.fetch}. + * @typedef FetchCallback + * @type {function} + * @param {?Error} error Error, if any, otherwise `null` + * @param {string} [contents] File contents, if there hasn't been an error + * @returns {undefined} + */ + +/** + * Options as used by {@link util.fetch}. + * @typedef FetchOptions + * @type {Object} + * @property {boolean} [binary=false] Whether expecting a binary response + * @property {boolean} [xhr=false] If `true`, forces the use of XMLHttpRequest + */ + +/** + * Fetches the contents of a file. + * @memberof util + * @param {string} filename File path or url + * @param {FetchOptions} options Fetch options + * @param {FetchCallback} callback Callback function + * @returns {undefined} + */ +function fetch(filename, options, callback) { + if (typeof options === "function") { + callback = options; + options = {}; + } else if (!options) + options = {}; + + if (!callback) + return asPromise(fetch, this, filename, options); // eslint-disable-line no-invalid-this + + // if a node-like filesystem is present, try it first but fall back to XHR if nothing is found. + if (!options.xhr && fs && fs.readFile) + return fs.readFile(filename, function fetchReadFileCallback(err, contents) { + return err && typeof XMLHttpRequest !== "undefined" + ? fetch.xhr(filename, options, callback) + : err + ? callback(err) + : callback(null, options.binary ? contents : contents.toString("utf8")); + }); + + // use the XHR version otherwise. + return fetch.xhr(filename, options, callback); +} + +/** + * Fetches the contents of a file. + * @name util.fetch + * @function + * @param {string} path File path or url + * @param {FetchCallback} callback Callback function + * @returns {undefined} + * @variation 2 + */ + +/** + * Fetches the contents of a file. 
+ * @name util.fetch + * @function + * @param {string} path File path or url + * @param {FetchOptions} [options] Fetch options + * @returns {Promise} Promise + * @variation 3 + */ + +/**/ +fetch.xhr = function fetch_xhr(filename, options, callback) { + var xhr = new XMLHttpRequest(); + xhr.onreadystatechange /* works everywhere */ = function fetchOnReadyStateChange() { + + if (xhr.readyState !== 4) + return undefined; + + // local cors security errors return status 0 / empty string, too. afaik this cannot be + // reliably distinguished from an actually empty file for security reasons. feel free + // to send a pull request if you are aware of a solution. + if (xhr.status !== 0 && xhr.status !== 200) + return callback(Error("status " + xhr.status)); + + // if binary data is expected, make sure that some sort of array is returned, even if + // ArrayBuffers are not supported. the binary string fallback, however, is unsafe. + if (options.binary) { + var buffer = xhr.response; + if (!buffer) { + buffer = []; + for (var i = 0; i < xhr.responseText.length; ++i) + buffer.push(xhr.responseText.charCodeAt(i) & 255); + } + return callback(null, typeof Uint8Array !== "undefined" ? new Uint8Array(buffer) : buffer); + } + return callback(null, xhr.responseText); + }; + + if (options.binary) { + // ref: https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/Sending_and_Receiving_Binary_Data#Receiving_binary_data_in_older_browsers + if ("overrideMimeType" in xhr) + xhr.overrideMimeType("text/plain; charset=x-user-defined"); + xhr.responseType = "arraybuffer"; + } + + xhr.open("GET", filename); + xhr.send(); +}; + + +/***/ }), + +/***/ 21843: +/***/ ((module) => { + +"use strict"; + + +module.exports = factory(factory); + +/** + * Reads / writes floats / doubles from / to buffers. + * @name util.float + * @namespace + */ + +/** + * Writes a 32 bit float to a buffer using little endian byte order. + * @name util.float.writeFloatLE + * @function + * @param {number} val Value to write + * @param {Uint8Array} buf Target buffer + * @param {number} pos Target buffer offset + * @returns {undefined} + */ + +/** + * Writes a 32 bit float to a buffer using big endian byte order. + * @name util.float.writeFloatBE + * @function + * @param {number} val Value to write + * @param {Uint8Array} buf Target buffer + * @param {number} pos Target buffer offset + * @returns {undefined} + */ + +/** + * Reads a 32 bit float from a buffer using little endian byte order. + * @name util.float.readFloatLE + * @function + * @param {Uint8Array} buf Source buffer + * @param {number} pos Source buffer offset + * @returns {number} Value read + */ + +/** + * Reads a 32 bit float from a buffer using big endian byte order. + * @name util.float.readFloatBE + * @function + * @param {Uint8Array} buf Source buffer + * @param {number} pos Source buffer offset + * @returns {number} Value read + */ + +/** + * Writes a 64 bit double to a buffer using little endian byte order. + * @name util.float.writeDoubleLE + * @function + * @param {number} val Value to write + * @param {Uint8Array} buf Target buffer + * @param {number} pos Target buffer offset + * @returns {undefined} + */ + +/** + * Writes a 64 bit double to a buffer using big endian byte order. + * @name util.float.writeDoubleBE + * @function + * @param {number} val Value to write + * @param {Uint8Array} buf Target buffer + * @param {number} pos Target buffer offset + * @returns {undefined} + */ + +/** + * Reads a 64 bit double from a buffer using little endian byte order. 
+ * @name util.float.readDoubleLE + * @function + * @param {Uint8Array} buf Source buffer + * @param {number} pos Source buffer offset + * @returns {number} Value read + */ + +/** + * Reads a 64 bit double from a buffer using big endian byte order. + * @name util.float.readDoubleBE + * @function + * @param {Uint8Array} buf Source buffer + * @param {number} pos Source buffer offset + * @returns {number} Value read + */ + +// Factory function for the purpose of node-based testing in modified global environments +function factory(exports) { + + // float: typed array + if (typeof Float32Array !== "undefined") (function() { + + var f32 = new Float32Array([ -0 ]), + f8b = new Uint8Array(f32.buffer), + le = f8b[3] === 128; + + function writeFloat_f32_cpy(val, buf, pos) { + f32[0] = val; + buf[pos ] = f8b[0]; + buf[pos + 1] = f8b[1]; + buf[pos + 2] = f8b[2]; + buf[pos + 3] = f8b[3]; + } + + function writeFloat_f32_rev(val, buf, pos) { + f32[0] = val; + buf[pos ] = f8b[3]; + buf[pos + 1] = f8b[2]; + buf[pos + 2] = f8b[1]; + buf[pos + 3] = f8b[0]; + } + + /* istanbul ignore next */ + exports.writeFloatLE = le ? writeFloat_f32_cpy : writeFloat_f32_rev; + /* istanbul ignore next */ + exports.writeFloatBE = le ? writeFloat_f32_rev : writeFloat_f32_cpy; + + function readFloat_f32_cpy(buf, pos) { + f8b[0] = buf[pos ]; + f8b[1] = buf[pos + 1]; + f8b[2] = buf[pos + 2]; + f8b[3] = buf[pos + 3]; + return f32[0]; + } + + function readFloat_f32_rev(buf, pos) { + f8b[3] = buf[pos ]; + f8b[2] = buf[pos + 1]; + f8b[1] = buf[pos + 2]; + f8b[0] = buf[pos + 3]; + return f32[0]; + } + + /* istanbul ignore next */ + exports.readFloatLE = le ? readFloat_f32_cpy : readFloat_f32_rev; + /* istanbul ignore next */ + exports.readFloatBE = le ? readFloat_f32_rev : readFloat_f32_cpy; + + // float: ieee754 + })(); else (function() { + + function writeFloat_ieee754(writeUint, val, buf, pos) { + var sign = val < 0 ? 1 : 0; + if (sign) + val = -val; + if (val === 0) + writeUint(1 / val > 0 ? /* positive */ 0 : /* negative 0 */ 2147483648, buf, pos); + else if (isNaN(val)) + writeUint(2143289344, buf, pos); + else if (val > 3.4028234663852886e+38) // +-Infinity + writeUint((sign << 31 | 2139095040) >>> 0, buf, pos); + else if (val < 1.1754943508222875e-38) // denormal + writeUint((sign << 31 | Math.round(val / 1.401298464324817e-45)) >>> 0, buf, pos); + else { + var exponent = Math.floor(Math.log(val) / Math.LN2), + mantissa = Math.round(val * Math.pow(2, -exponent) * 8388608) & 8388607; + writeUint((sign << 31 | exponent + 127 << 23 | mantissa) >>> 0, buf, pos); + } + } + + exports.writeFloatLE = writeFloat_ieee754.bind(null, writeUintLE); + exports.writeFloatBE = writeFloat_ieee754.bind(null, writeUintBE); + + function readFloat_ieee754(readUint, buf, pos) { + var uint = readUint(buf, pos), + sign = (uint >> 31) * 2 + 1, + exponent = uint >>> 23 & 255, + mantissa = uint & 8388607; + return exponent === 255 + ? mantissa + ? NaN + : sign * Infinity + : exponent === 0 // denormal + ? 
sign * 1.401298464324817e-45 * mantissa + : sign * Math.pow(2, exponent - 150) * (mantissa + 8388608); + } + + exports.readFloatLE = readFloat_ieee754.bind(null, readUintLE); + exports.readFloatBE = readFloat_ieee754.bind(null, readUintBE); + + })(); + + // double: typed array + if (typeof Float64Array !== "undefined") (function() { + + var f64 = new Float64Array([-0]), + f8b = new Uint8Array(f64.buffer), + le = f8b[7] === 128; + + function writeDouble_f64_cpy(val, buf, pos) { + f64[0] = val; + buf[pos ] = f8b[0]; + buf[pos + 1] = f8b[1]; + buf[pos + 2] = f8b[2]; + buf[pos + 3] = f8b[3]; + buf[pos + 4] = f8b[4]; + buf[pos + 5] = f8b[5]; + buf[pos + 6] = f8b[6]; + buf[pos + 7] = f8b[7]; + } + + function writeDouble_f64_rev(val, buf, pos) { + f64[0] = val; + buf[pos ] = f8b[7]; + buf[pos + 1] = f8b[6]; + buf[pos + 2] = f8b[5]; + buf[pos + 3] = f8b[4]; + buf[pos + 4] = f8b[3]; + buf[pos + 5] = f8b[2]; + buf[pos + 6] = f8b[1]; + buf[pos + 7] = f8b[0]; + } + + /* istanbul ignore next */ + exports.writeDoubleLE = le ? writeDouble_f64_cpy : writeDouble_f64_rev; + /* istanbul ignore next */ + exports.writeDoubleBE = le ? writeDouble_f64_rev : writeDouble_f64_cpy; + + function readDouble_f64_cpy(buf, pos) { + f8b[0] = buf[pos ]; + f8b[1] = buf[pos + 1]; + f8b[2] = buf[pos + 2]; + f8b[3] = buf[pos + 3]; + f8b[4] = buf[pos + 4]; + f8b[5] = buf[pos + 5]; + f8b[6] = buf[pos + 6]; + f8b[7] = buf[pos + 7]; + return f64[0]; + } + + function readDouble_f64_rev(buf, pos) { + f8b[7] = buf[pos ]; + f8b[6] = buf[pos + 1]; + f8b[5] = buf[pos + 2]; + f8b[4] = buf[pos + 3]; + f8b[3] = buf[pos + 4]; + f8b[2] = buf[pos + 5]; + f8b[1] = buf[pos + 6]; + f8b[0] = buf[pos + 7]; + return f64[0]; + } + + /* istanbul ignore next */ + exports.readDoubleLE = le ? readDouble_f64_cpy : readDouble_f64_rev; + /* istanbul ignore next */ + exports.readDoubleBE = le ? readDouble_f64_rev : readDouble_f64_cpy; + + // double: ieee754 + })(); else (function() { + + function writeDouble_ieee754(writeUint, off0, off1, val, buf, pos) { + var sign = val < 0 ? 1 : 0; + if (sign) + val = -val; + if (val === 0) { + writeUint(0, buf, pos + off0); + writeUint(1 / val > 0 ? /* positive */ 0 : /* negative 0 */ 2147483648, buf, pos + off1); + } else if (isNaN(val)) { + writeUint(0, buf, pos + off0); + writeUint(2146959360, buf, pos + off1); + } else if (val > 1.7976931348623157e+308) { // +-Infinity + writeUint(0, buf, pos + off0); + writeUint((sign << 31 | 2146435072) >>> 0, buf, pos + off1); + } else { + var mantissa; + if (val < 2.2250738585072014e-308) { // denormal + mantissa = val / 5e-324; + writeUint(mantissa >>> 0, buf, pos + off0); + writeUint((sign << 31 | mantissa / 4294967296) >>> 0, buf, pos + off1); + } else { + var exponent = Math.floor(Math.log(val) / Math.LN2); + if (exponent === 1024) + exponent = 1023; + mantissa = val * Math.pow(2, -exponent); + writeUint(mantissa * 4503599627370496 >>> 0, buf, pos + off0); + writeUint((sign << 31 | exponent + 1023 << 20 | mantissa * 1048576 & 1048575) >>> 0, buf, pos + off1); + } + } + } + + exports.writeDoubleLE = writeDouble_ieee754.bind(null, writeUintLE, 0, 4); + exports.writeDoubleBE = writeDouble_ieee754.bind(null, writeUintBE, 4, 0); + + function readDouble_ieee754(readUint, off0, off1, buf, pos) { + var lo = readUint(buf, pos + off0), + hi = readUint(buf, pos + off1); + var sign = (hi >> 31) * 2 + 1, + exponent = hi >>> 20 & 2047, + mantissa = 4294967296 * (hi & 1048575) + lo; + return exponent === 2047 + ? mantissa + ? NaN + : sign * Infinity + : exponent === 0 // denormal + ? 
sign * 5e-324 * mantissa + : sign * Math.pow(2, exponent - 1075) * (mantissa + 4503599627370496); + } + + exports.readDoubleLE = readDouble_ieee754.bind(null, readUintLE, 0, 4); + exports.readDoubleBE = readDouble_ieee754.bind(null, readUintBE, 4, 0); + + })(); + + return exports; +} + +// uint helpers + +function writeUintLE(val, buf, pos) { + buf[pos ] = val & 255; + buf[pos + 1] = val >>> 8 & 255; + buf[pos + 2] = val >>> 16 & 255; + buf[pos + 3] = val >>> 24; +} + +function writeUintBE(val, buf, pos) { + buf[pos ] = val >>> 24; + buf[pos + 1] = val >>> 16 & 255; + buf[pos + 2] = val >>> 8 & 255; + buf[pos + 3] = val & 255; +} + +function readUintLE(buf, pos) { + return (buf[pos ] + | buf[pos + 1] << 8 + | buf[pos + 2] << 16 + | buf[pos + 3] << 24) >>> 0; +} + +function readUintBE(buf, pos) { + return (buf[pos ] << 24 + | buf[pos + 1] << 16 + | buf[pos + 2] << 8 + | buf[pos + 3]) >>> 0; +} + + +/***/ }), + +/***/ 60094: +/***/ ((module) => { + +"use strict"; + +module.exports = inquire; + +/** + * Requires a module only if available. + * @memberof util + * @param {string} moduleName Module to require + * @returns {?Object} Required module if available and not empty, otherwise `null` + */ +function inquire(moduleName) { + try { + var mod = eval("quire".replace(/^/,"re"))(moduleName); // eslint-disable-line no-eval + if (mod && (mod.length || Object.keys(mod).length)) + return mod; + } catch (e) {} // eslint-disable-line no-empty + return null; +} + + +/***/ }), + +/***/ 24761: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * A minimal path module to resolve Unix, Windows and URL paths alike. + * @memberof util + * @namespace + */ +var path = exports; + +var isAbsolute = +/** + * Tests if the specified path is absolute. + * @param {string} path Path to test + * @returns {boolean} `true` if path is absolute + */ +path.isAbsolute = function isAbsolute(path) { + return /^(?:\/|\w+:)/.test(path); +}; + +var normalize = +/** + * Normalizes the specified path. + * @param {string} path Path to normalize + * @returns {string} Normalized path + */ +path.normalize = function normalize(path) { + path = path.replace(/\\/g, "/") + .replace(/\/{2,}/g, "/"); + var parts = path.split("/"), + absolute = isAbsolute(path), + prefix = ""; + if (absolute) + prefix = parts.shift() + "/"; + for (var i = 0; i < parts.length;) { + if (parts[i] === "..") { + if (i > 0 && parts[i - 1] !== "..") + parts.splice(--i, 2); + else if (absolute) + parts.splice(i, 1); + else + ++i; + } else if (parts[i] === ".") + parts.splice(i, 1); + else + ++i; + } + return prefix + parts.join("/"); +}; + +/** + * Resolves the specified include path against the specified origin path. + * @param {string} originPath Path to the origin file + * @param {string} includePath Include path relative to origin path + * @param {boolean} [alreadyNormalized=false] `true` if both paths are already known to be normalized + * @returns {string} Path to the include file + */ +path.resolve = function resolve(originPath, includePath, alreadyNormalized) { + if (!alreadyNormalized) + includePath = normalize(includePath); + if (isAbsolute(includePath)) + return includePath; + if (!alreadyNormalized) + originPath = normalize(originPath); + return (originPath = originPath.replace(/(?:\/|^)[^/]+$/, "")).length ? normalize(originPath + "/" + includePath) : includePath; +}; + + +/***/ }), + +/***/ 47743: +/***/ ((module) => { + +"use strict"; + +module.exports = pool; + +/** + * An allocator as used by {@link util.pool}. 
+ * @typedef PoolAllocator + * @type {function} + * @param {number} size Buffer size + * @returns {Uint8Array} Buffer + */ + +/** + * A slicer as used by {@link util.pool}. + * @typedef PoolSlicer + * @type {function} + * @param {number} start Start offset + * @param {number} end End offset + * @returns {Uint8Array} Buffer slice + * @this {Uint8Array} + */ + +/** + * A general purpose buffer pool. + * @memberof util + * @function + * @param {PoolAllocator} alloc Allocator + * @param {PoolSlicer} slice Slicer + * @param {number} [size=8192] Slab size + * @returns {PoolAllocator} Pooled allocator + */ +function pool(alloc, slice, size) { + var SIZE = size || 8192; + var MAX = SIZE >>> 1; + var slab = null; + var offset = SIZE; + return function pool_alloc(size) { + if (size < 1 || size > MAX) + return alloc(size); + if (offset + size > SIZE) { + slab = alloc(SIZE); + offset = 0; + } + var buf = slice.call(slab, offset, offset += size); + if (offset & 7) // align to 32 bit + offset = (offset | 7) + 1; + return buf; + }; +} + + +/***/ }), + +/***/ 99049: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * A minimal UTF8 implementation for number arrays. + * @memberof util + * @namespace + */ +var utf8 = exports; + +/** + * Calculates the UTF8 byte length of a string. + * @param {string} string String + * @returns {number} Byte length + */ +utf8.length = function utf8_length(string) { + var len = 0, + c = 0; + for (var i = 0; i < string.length; ++i) { + c = string.charCodeAt(i); + if (c < 128) + len += 1; + else if (c < 2048) + len += 2; + else if ((c & 0xFC00) === 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) { + ++i; + len += 4; + } else + len += 3; + } + return len; +}; + +/** + * Reads UTF8 bytes as a string. + * @param {Uint8Array} buffer Source buffer + * @param {number} start Source start + * @param {number} end Source end + * @returns {string} String read + */ +utf8.read = function utf8_read(buffer, start, end) { + var len = end - start; + if (len < 1) + return ""; + var parts = null, + chunk = [], + i = 0, // char offset + t; // temporary + while (start < end) { + t = buffer[start++]; + if (t < 128) + chunk[i++] = t; + else if (t > 191 && t < 224) + chunk[i++] = (t & 31) << 6 | buffer[start++] & 63; + else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000; + chunk[i++] = 0xD800 + (t >> 10); + chunk[i++] = 0xDC00 + (t & 1023); + } else + chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63; + if (i > 8191) { + (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk)); + i = 0; + } + } + if (parts) { + if (i) + parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))); + return parts.join(""); + } + return String.fromCharCode.apply(String, chunk.slice(0, i)); +}; + +/** + * Writes a string as UTF8 bytes. 
+ * @param {string} string Source string + * @param {Uint8Array} buffer Destination buffer + * @param {number} offset Destination offset + * @returns {number} Bytes written + */ +utf8.write = function utf8_write(string, buffer, offset) { + var start = offset, + c1, // character 1 + c2; // character 2 + for (var i = 0; i < string.length; ++i) { + c1 = string.charCodeAt(i); + if (c1 < 128) { + buffer[offset++] = c1; + } else if (c1 < 2048) { + buffer[offset++] = c1 >> 6 | 192; + buffer[offset++] = c1 & 63 | 128; + } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) { + c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF); + ++i; + buffer[offset++] = c1 >> 18 | 240; + buffer[offset++] = c1 >> 12 & 63 | 128; + buffer[offset++] = c1 >> 6 & 63 | 128; + buffer[offset++] = c1 & 63 | 128; + } else { + buffer[offset++] = c1 >> 12 | 224; + buffer[offset++] = c1 >> 6 & 63 | 128; + buffer[offset++] = c1 & 63 | 128; + } + } + return offset - start; +}; + + +/***/ }), + +/***/ 43779: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = exports.DEFAULT_USE_DUALSTACK_ENDPOINT = exports.CONFIG_USE_DUALSTACK_ENDPOINT = exports.ENV_USE_DUALSTACK_ENDPOINT = void 0; +const util_config_provider_1 = __nccwpck_require__(83375); +exports.ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +exports.CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +exports.DEFAULT_USE_DUALSTACK_ENDPOINT = false; +exports.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, util_config_provider_1.booleanSelector)(env, exports.ENV_USE_DUALSTACK_ENDPOINT, util_config_provider_1.SelectorType.ENV), + configFileSelector: (profile) => (0, util_config_provider_1.booleanSelector)(profile, exports.CONFIG_USE_DUALSTACK_ENDPOINT, util_config_provider_1.SelectorType.CONFIG), + default: false, +}; + + +/***/ }), + +/***/ 17994: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = exports.DEFAULT_USE_FIPS_ENDPOINT = exports.CONFIG_USE_FIPS_ENDPOINT = exports.ENV_USE_FIPS_ENDPOINT = void 0; +const util_config_provider_1 = __nccwpck_require__(83375); +exports.ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +exports.CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +exports.DEFAULT_USE_FIPS_ENDPOINT = false; +exports.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, util_config_provider_1.booleanSelector)(env, exports.ENV_USE_FIPS_ENDPOINT, util_config_provider_1.SelectorType.ENV), + configFileSelector: (profile) => (0, util_config_provider_1.booleanSelector)(profile, exports.CONFIG_USE_FIPS_ENDPOINT, util_config_provider_1.SelectorType.CONFIG), + default: false, +}; + + +/***/ }), + +/***/ 18421: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(43779), exports); +tslib_1.__exportStar(__nccwpck_require__(17994), exports); +tslib_1.__exportStar(__nccwpck_require__(37432), exports); +tslib_1.__exportStar(__nccwpck_require__(61892), exports); + + +/***/ }), + +/***/ 37432: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => 
{ + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveCustomEndpointsConfig = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const resolveCustomEndpointsConfig = (input) => { + var _a, _b; + const { endpoint, urlParser } = input; + return { + ...input, + tls: (_a = input.tls) !== null && _a !== void 0 ? _a : true, + endpoint: (0, util_middleware_1.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: (0, util_middleware_1.normalizeProvider)((_b = input.useDualstackEndpoint) !== null && _b !== void 0 ? _b : false), + }; +}; +exports.resolveCustomEndpointsConfig = resolveCustomEndpointsConfig; + + +/***/ }), + +/***/ 61892: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveEndpointsConfig = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const getEndpointFromRegion_1 = __nccwpck_require__(48570); +const resolveEndpointsConfig = (input) => { + var _a, _b; + const useDualstackEndpoint = (0, util_middleware_1.normalizeProvider)((_a = input.useDualstackEndpoint) !== null && _a !== void 0 ? _a : false); + const { endpoint, useFipsEndpoint, urlParser } = input; + return { + ...input, + tls: (_b = input.tls) !== null && _b !== void 0 ? _b : true, + endpoint: endpoint + ? (0, util_middleware_1.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint) + : () => (0, getEndpointFromRegion_1.getEndpointFromRegion)({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint, + }; +}; +exports.resolveEndpointsConfig = resolveEndpointsConfig; + + +/***/ }), + +/***/ 48570: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointFromRegion = void 0; +const getEndpointFromRegion = async (input) => { + var _a; + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = (_a = (await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint }))) !== null && _a !== void 0 ? _a : {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? 
"https:" : "http:"}//${hostname}`); +}; +exports.getEndpointFromRegion = getEndpointFromRegion; + + +/***/ }), + +/***/ 53098: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(18421), exports); +tslib_1.__exportStar(__nccwpck_require__(221), exports); +tslib_1.__exportStar(__nccwpck_require__(86985), exports); + + +/***/ }), + +/***/ 33898: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_REGION_CONFIG_FILE_OPTIONS = exports.NODE_REGION_CONFIG_OPTIONS = exports.REGION_INI_NAME = exports.REGION_ENV_NAME = void 0; +exports.REGION_ENV_NAME = "AWS_REGION"; +exports.REGION_INI_NAME = "region"; +exports.NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[exports.REGION_ENV_NAME], + configFileSelector: (profile) => profile[exports.REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +exports.NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; + + +/***/ }), + +/***/ 49506: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRealRegion = void 0; +const isFipsRegion_1 = __nccwpck_require__(43870); +const getRealRegion = (region) => (0, isFipsRegion_1.isFipsRegion)(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? "us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; +exports.getRealRegion = getRealRegion; + + +/***/ }), + +/***/ 221: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(33898), exports); +tslib_1.__exportStar(__nccwpck_require__(87065), exports); + + +/***/ }), + +/***/ 43870: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isFipsRegion = void 0; +const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); +exports.isFipsRegion = isFipsRegion; + + +/***/ }), + +/***/ 87065: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveRegionConfig = void 0; +const getRealRegion_1 = __nccwpck_require__(49506); +const isFipsRegion_1 = __nccwpck_require__(43870); +const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return { + ...input, + region: async () => { + if (typeof region === "string") { + return (0, getRealRegion_1.getRealRegion)(region); + } + const providedRegion = await region(); + return (0, getRealRegion_1.getRealRegion)(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if ((0, isFipsRegion_1.isFipsRegion)(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? 
Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }; +}; +exports.resolveRegionConfig = resolveRegionConfig; + + +/***/ }), + +/***/ 19814: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 14832: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 99760: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHostnameFromVariants = void 0; +const getHostnameFromVariants = (variants = [], { useFipsEndpoint, useDualstackEndpoint }) => { + var _a; + return (_a = variants.find(({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack"))) === null || _a === void 0 ? void 0 : _a.hostname; +}; +exports.getHostnameFromVariants = getHostnameFromVariants; + + +/***/ }), + +/***/ 77792: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRegionInfo = void 0; +const getHostnameFromVariants_1 = __nccwpck_require__(99760); +const getResolvedHostname_1 = __nccwpck_require__(1487); +const getResolvedPartition_1 = __nccwpck_require__(44441); +const getResolvedSigningRegion_1 = __nccwpck_require__(92281); +const getRegionInfo = (region, { useFipsEndpoint = false, useDualstackEndpoint = false, signingService, regionHash, partitionHash, }) => { + var _a, _b, _c, _d, _e, _f; + const partition = (0, getResolvedPartition_1.getResolvedPartition)(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : (_b = (_a = partitionHash[partition]) === null || _a === void 0 ? void 0 : _a.endpoint) !== null && _b !== void 0 ? _b : region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = (0, getHostnameFromVariants_1.getHostnameFromVariants)((_c = regionHash[resolvedRegion]) === null || _c === void 0 ? void 0 : _c.variants, hostnameOptions); + const partitionHostname = (0, getHostnameFromVariants_1.getHostnameFromVariants)((_d = partitionHash[partition]) === null || _d === void 0 ? void 0 : _d.variants, hostnameOptions); + const hostname = (0, getResolvedHostname_1.getResolvedHostname)(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === undefined) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = (0, getResolvedSigningRegion_1.getResolvedSigningRegion)(hostname, { + signingRegion: (_e = regionHash[resolvedRegion]) === null || _e === void 0 ? void 0 : _e.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint, + }); + return { + partition, + signingService, + hostname, + ...(signingRegion && { signingRegion }), + ...(((_f = regionHash[resolvedRegion]) === null || _f === void 0 ? void 0 : _f.signingService) && { + signingService: regionHash[resolvedRegion].signingService, + }), + }; +}; +exports.getRegionInfo = getRegionInfo; + + +/***/ }), + +/***/ 1487: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getResolvedHostname = void 0; +const getResolvedHostname = (resolvedRegion, { regionHostname, partitionHostname }) => regionHostname + ? 
regionHostname + : partitionHostname + ? partitionHostname.replace("{region}", resolvedRegion) + : undefined; +exports.getResolvedHostname = getResolvedHostname; + + +/***/ }), + +/***/ 44441: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getResolvedPartition = void 0; +const getResolvedPartition = (region, { partitionHash }) => { var _a; return (_a = Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region))) !== null && _a !== void 0 ? _a : "aws"; }; +exports.getResolvedPartition = getResolvedPartition; + + +/***/ }), + +/***/ 92281: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getResolvedSigningRegion = void 0; +const getResolvedSigningRegion = (hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } + else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}; +exports.getResolvedSigningRegion = getResolvedSigningRegion; + + +/***/ }), + +/***/ 86985: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(19814), exports); +tslib_1.__exportStar(__nccwpck_require__(14832), exports); +tslib_1.__exportStar(__nccwpck_require__(77792), exports); + + +/***/ }), + +/***/ 55829: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DefaultIdentityProviderConfig: () => DefaultIdentityProviderConfig, + EXPIRATION_MS: () => EXPIRATION_MS, + HttpApiKeyAuthSigner: () => HttpApiKeyAuthSigner, + HttpBearerAuthSigner: () => HttpBearerAuthSigner, + NoAuthSigner: () => NoAuthSigner, + RequestBuilder: () => RequestBuilder, + createIsIdentityExpiredFunction: () => createIsIdentityExpiredFunction, + createPaginator: () => createPaginator, + doesIdentityRequireRefresh: () => doesIdentityRequireRefresh, + getHttpAuthSchemeEndpointRuleSetPlugin: () => getHttpAuthSchemeEndpointRuleSetPlugin, + getHttpAuthSchemePlugin: () => getHttpAuthSchemePlugin, + getHttpSigningPlugin: () => getHttpSigningPlugin, + getSmithyContext: () => getSmithyContext3, + 
httpAuthSchemeEndpointRuleSetMiddlewareOptions: () => httpAuthSchemeEndpointRuleSetMiddlewareOptions, + httpAuthSchemeMiddleware: () => httpAuthSchemeMiddleware, + httpAuthSchemeMiddlewareOptions: () => httpAuthSchemeMiddlewareOptions, + httpSigningMiddleware: () => httpSigningMiddleware, + httpSigningMiddlewareOptions: () => httpSigningMiddlewareOptions, + isIdentityExpired: () => isIdentityExpired, + memoizeIdentityProvider: () => memoizeIdentityProvider, + normalizeProvider: () => normalizeProvider, + requestBuilder: () => requestBuilder +}); +module.exports = __toCommonJS(src_exports); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +var import_util_middleware = __nccwpck_require__(56266); +function convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = /* @__PURE__ */ new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +__name(convertHttpAuthSchemesToMap, "convertHttpAuthSchemesToMap"); +var httpAuthSchemeMiddleware = /* @__PURE__ */ __name((config, mwOptions) => (next, context) => async (args) => { + var _a; + const options = config.httpAuthSchemeProvider( + await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input) + ); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const failureReasons = []; + for (const option of options) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = ((_a = option.propertiesExtractor) == null ? 
void 0 : _a.call(option, config, context)) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}, "httpAuthSchemeMiddleware"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.ts +var import_middleware_endpoint = __nccwpck_require__(41677); +var httpAuthSchemeEndpointRuleSetMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: import_middleware_endpoint.endpointMiddlewareOptions.name +}; +var getHttpAuthSchemeEndpointRuleSetPlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeEndpointRuleSetMiddlewareOptions + ); + } +}), "getHttpAuthSchemeEndpointRuleSetPlugin"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemePlugin.ts +var import_middleware_serde = __nccwpck_require__(33411); +var httpAuthSchemeMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getHttpAuthSchemePlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeMiddlewareOptions + ); + } +}), "getHttpAuthSchemePlugin"); + +// src/middleware-http-signing/httpSigningMiddleware.ts +var import_protocol_http = __nccwpck_require__(51343); + +var defaultErrorHandler = /* @__PURE__ */ __name((signingProperties) => (error) => { + throw error; +}, "defaultErrorHandler"); +var defaultSuccessHandler = /* @__PURE__ */ __name((httpResponse, signingProperties) => { +}, "defaultSuccessHandler"); +var httpSigningMiddleware = /* @__PURE__ */ __name((config) => (next, context) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { + httpAuthOption: { signingProperties = {} }, + identity, + signer + } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, signingProperties) + }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}, "httpSigningMiddleware"); + +// src/middleware-http-signing/getHttpSigningMiddleware.ts +var import_middleware_retry = __nccwpck_require__(5019); +var httpSigningMiddlewareOptions = { + step: 
"finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: import_middleware_retry.retryMiddlewareOptions.name +}; +var getHttpSigningPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + } +}), "getHttpSigningPlugin"); + +// src/util-identity-and-auth/DefaultIdentityProviderConfig.ts +var _DefaultIdentityProviderConfig = class _DefaultIdentityProviderConfig { + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. + * + * @param config scheme IDs and identity providers to configure + */ + constructor(config) { + this.authSchemes = /* @__PURE__ */ new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== void 0) { + this.authSchemes.set(key, value); + } + } + } + getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +}; +__name(_DefaultIdentityProviderConfig, "DefaultIdentityProviderConfig"); +var DefaultIdentityProviderConfig = _DefaultIdentityProviderConfig; + +// src/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.ts +var import_types = __nccwpck_require__(19801); +var _HttpApiKeyAuthSigner = class _HttpApiKeyAuthSigner { + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error( + "request could not be signed with `apiKey` since the `name` and `in` signer properties are missing" + ); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = httpRequest.clone(); + if (signingProperties.in === import_types.HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } else if (signingProperties.in === import_types.HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme ? 
`${signingProperties.scheme} ${identity.apiKey}` : identity.apiKey; + } else { + throw new Error( + "request can only be signed with `apiKey` locations `query` or `header`, but found: `" + signingProperties.in + "`" + ); + } + return clonedRequest; + } +}; +__name(_HttpApiKeyAuthSigner, "HttpApiKeyAuthSigner"); +var HttpApiKeyAuthSigner = _HttpApiKeyAuthSigner; + +// src/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.ts +var _HttpBearerAuthSigner = class _HttpBearerAuthSigner { + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = httpRequest.clone(); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +}; +__name(_HttpBearerAuthSigner, "HttpBearerAuthSigner"); +var HttpBearerAuthSigner = _HttpBearerAuthSigner; + +// src/util-identity-and-auth/httpAuthSchemes/noAuth.ts +var _NoAuthSigner = class _NoAuthSigner { + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +}; +__name(_NoAuthSigner, "NoAuthSigner"); +var NoAuthSigner = _NoAuthSigner; + +// src/util-identity-and-auth/memoizeIdentityProvider.ts +var createIsIdentityExpiredFunction = /* @__PURE__ */ __name((expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs, "createIsIdentityExpiredFunction"); +var EXPIRATION_MS = 3e5; +var isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +var doesIdentityRequireRefresh = /* @__PURE__ */ __name((identity) => identity.expiration !== void 0, "doesIdentityRequireRefresh"); +var memoizeIdentityProvider = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + if (provider === void 0) { + return void 0; + } + const normalizedProvider = typeof provider !== "function" ? async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || (options == null ? void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || (options == null ? 
void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +}, "memoizeIdentityProvider"); + +// src/getSmithyContext.ts + +var getSmithyContext3 = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); + +// src/protocols/requestBuilder.ts + +var import_smithy_client = __nccwpck_require__(97257); +function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +__name(requestBuilder, "requestBuilder"); +var _RequestBuilder = class _RequestBuilder { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new import_protocol_http.HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers + }); + } + /** + * Brevity setter for "hostname". + */ + hn(hostname) { + this.hostname = hostname; + return this; + } + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = `${(basePath == null ? void 0 : basePath.endsWith("/")) ? basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + /** + * Brevity incremental builder for "path". + */ + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = (0, import_smithy_client.resolvedPath)(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + /** + * Brevity setter for "headers". + */ + h(headers) { + this.headers = headers; + return this; + } + /** + * Brevity setter for "query". + */ + q(query) { + this.query = query; + return this; + } + /** + * Brevity setter for "body". + */ + b(body) { + this.body = body; + return this; + } + /** + * Brevity setter for "method". 
+ */ + m(method) { + this.method = method; + return this; + } +}; +__name(_RequestBuilder, "RequestBuilder"); +var RequestBuilder = _RequestBuilder; + +// src/pagination/createPaginator.ts +var makePagedClientRequest = /* @__PURE__ */ __name(async (CommandCtor, client, input, ...args) => { + return await client.send(new CommandCtor(input), ...args); +}, "makePagedClientRequest"); +function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return /* @__PURE__ */ __name(async function* paginateOperation(config, input, ...additionalArguments) { + let token = config.startingToken || void 0; + let hasNext = true; + let page; + while (hasNext) { + input[inputTokenName] = token; + if (pageSizeTokenName) { + input[pageSizeTokenName] = input[pageSizeTokenName] ?? config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest(CommandCtor, config.client, input, ...additionalArguments); + } else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return void 0; + }, "paginateOperation"); +} +__name(createPaginator, "createPaginator"); +var get = /* @__PURE__ */ __name((fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return void 0; + } + cursor = cursor[step]; + } + return cursor; +}, "get"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 79074: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 86402: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointFromConfig = void 0; +const node_config_provider_1 = __nccwpck_require__(66803); +const getEndpointUrlConfig_1 = __nccwpck_require__(50188); +const getEndpointFromConfig = async (serviceId) => (0, node_config_provider_1.loadConfig)((0, getEndpointUrlConfig_1.getEndpointUrlConfig)(serviceId))(); 
+exports.getEndpointFromConfig = getEndpointFromConfig; + + +/***/ }), + +/***/ 50188: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointUrlConfig = void 0; +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = config[["services", profile.services].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); +exports.getEndpointUrlConfig = getEndpointUrlConfig; + + +/***/ }), + +/***/ 41677: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + endpointMiddleware: () => endpointMiddleware, + endpointMiddlewareOptions: () => endpointMiddlewareOptions, + getEndpointFromInstructions: () => getEndpointFromInstructions, + getEndpointPlugin: () => getEndpointPlugin, + resolveEndpointConfig: () => resolveEndpointConfig, + resolveParams: () => resolveParams, + toEndpointV1: () => toEndpointV1 +}); +module.exports = __toCommonJS(src_exports); + +// src/service-customizations/s3.ts +var resolveParamsForS3 = /* @__PURE__ */ __name(async (endpointParams) => { + const bucket = (endpointParams == null ? 
void 0 : endpointParams.Bucket) || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } else if (!isDnsCompatibleBucketName(bucket) || bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:") || bucket.toLowerCase() !== bucket || bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}, "resolveParamsForS3"); +var DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +var IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +var DOTS_PATTERN = /\.\./; +var isDnsCompatibleBucketName = /* @__PURE__ */ __name((bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName), "isDnsCompatibleBucketName"); +var isArnBucketName = /* @__PURE__ */ __name((bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return isValidArn; +}, "isArnBucketName"); + +// src/adaptors/createConfigValueProvider.ts +var createConfigValueProvider = /* @__PURE__ */ __name((configKey, canonicalEndpointParamKey, config) => { + const configProvider = /* @__PURE__ */ __name(async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }, "configProvider"); + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = (credentials == null ? void 0 : credentials.credentialScope) ?? (credentials == null ? void 0 : credentials.CredentialScope); + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? 
":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}, "createConfigValueProvider"); + +// src/adaptors/getEndpointFromInstructions.ts +var import_getEndpointFromConfig = __nccwpck_require__(86402); + +// src/adaptors/toEndpointV1.ts +var import_url_parser = __nccwpck_require__(38524); +var toEndpointV1 = /* @__PURE__ */ __name((endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return (0, import_url_parser.parseUrl)(endpoint.url); + } + return endpoint; + } + return (0, import_url_parser.parseUrl)(endpoint); +}, "toEndpointV1"); + +// src/adaptors/getEndpointFromInstructions.ts +var getEndpointFromInstructions = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + const endpointFromConfig = await (0, import_getEndpointFromConfig.getEndpointFromConfig)(clientConfig.serviceId || ""); + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}, "getEndpointFromInstructions"); +var resolveParams = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig) => { + var _a; + const endpointParams = {}; + const instructions = ((_a = instructionsSupplier == null ? void 0 : instructionsSupplier.getEndpointParameterInstructions) == null ? void 0 : _a.call(instructionsSupplier)) || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}, "resolveParams"); + +// src/endpointMiddleware.ts +var import_util_middleware = __nccwpck_require__(56266); +var endpointMiddleware = /* @__PURE__ */ __name(({ + config, + instructions +}) => { + return (next, context) => async (args) => { + var _a, _b, _c; + const endpoint = await getEndpointFromInstructions( + args.input, + { + getEndpointParameterInstructions() { + return instructions; + } + }, + { ...config }, + context + ); + context.endpointV2 = endpoint; + context.authSchemes = (_a = endpoint.properties) == null ? void 0 : _a.authSchemes; + const authScheme = (_b = context.authSchemes) == null ? void 0 : _b[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const httpAuthOption = (_c = smithyContext == null ? void 0 : smithyContext.selectedHttpAuthScheme) == null ? 
void 0 : _c.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign( + httpAuthOption.signingProperties || {}, + { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: authScheme.signingRegionSet + }, + authScheme.properties + ); + } + } + return next({ + ...args + }); + }; +}, "endpointMiddleware"); + +// src/getEndpointPlugin.ts +var import_middleware_serde = __nccwpck_require__(33411); +var endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getEndpointPlugin = /* @__PURE__ */ __name((config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + endpointMiddleware({ + config, + instructions + }), + endpointMiddlewareOptions + ); + } +}), "getEndpointPlugin"); + +// src/resolveEndpointConfig.ts + +var resolveEndpointConfig = /* @__PURE__ */ __name((input) => { + const tls = input.tls ?? true; + const { endpoint } = input; + const customEndpointProvider = endpoint != null ? async () => toEndpointV1(await (0, import_util_middleware.normalizeProvider)(endpoint)()) : void 0; + const isCustomEndpoint = !!endpoint; + return { + ...input, + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(input.useDualstackEndpoint ?? false), + useFipsEndpoint: (0, import_util_middleware.normalizeProvider)(input.useFipsEndpoint ?? false) + }; +}, "resolveEndpointConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 5019: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + CONFIG_MAX_ATTEMPTS: () => CONFIG_MAX_ATTEMPTS, + CONFIG_RETRY_MODE: () => CONFIG_RETRY_MODE, + ENV_MAX_ATTEMPTS: () => ENV_MAX_ATTEMPTS, + ENV_RETRY_MODE: () => ENV_RETRY_MODE, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS: () => NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + NODE_RETRY_MODE_CONFIG_OPTIONS: () => NODE_RETRY_MODE_CONFIG_OPTIONS, + StandardRetryStrategy: () => StandardRetryStrategy, + defaultDelayDecider: () => defaultDelayDecider, + defaultRetryDecider: () => defaultRetryDecider, + getOmitRetryHeadersPlugin: () => getOmitRetryHeadersPlugin, + getRetryAfterHint: () => getRetryAfterHint, + getRetryPlugin: () => 
getRetryPlugin, + omitRetryHeadersMiddleware: () => omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions: () => omitRetryHeadersMiddlewareOptions, + resolveRetryConfig: () => resolveRetryConfig, + retryMiddleware: () => retryMiddleware, + retryMiddlewareOptions: () => retryMiddlewareOptions +}); +module.exports = __toCommonJS(src_exports); + +// src/AdaptiveRetryStrategy.ts + + +// src/StandardRetryStrategy.ts +var import_protocol_http = __nccwpck_require__(51343); + + +var import_uuid = __nccwpck_require__(75840); + +// src/defaultRetryQuota.ts +var import_util_retry = __nccwpck_require__(79154); +var getDefaultRetryQuota = /* @__PURE__ */ __name((initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = (options == null ? void 0 : options.noRetryIncrement) ?? import_util_retry.NO_RETRY_INCREMENT; + const retryCost = (options == null ? void 0 : options.retryCost) ?? import_util_retry.RETRY_COST; + const timeoutRetryCost = (options == null ? void 0 : options.timeoutRetryCost) ?? import_util_retry.TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = /* @__PURE__ */ __name((error) => error.name === "TimeoutError" ? timeoutRetryCost : retryCost, "getCapacityAmount"); + const hasRetryTokens = /* @__PURE__ */ __name((error) => getCapacityAmount(error) <= availableCapacity, "hasRetryTokens"); + const retrieveRetryTokens = /* @__PURE__ */ __name((error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }, "retrieveRetryTokens"); + const releaseRetryTokens = /* @__PURE__ */ __name((capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }, "releaseRetryTokens"); + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens + }); +}, "getDefaultRetryQuota"); + +// src/delayDecider.ts + +var defaultDelayDecider = /* @__PURE__ */ __name((delayBase, attempts) => Math.floor(Math.min(import_util_retry.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)), "defaultDelayDecider"); + +// src/retryDecider.ts +var import_service_error_classification = __nccwpck_require__(6375); +var defaultRetryDecider = /* @__PURE__ */ __name((error) => { + if (!error) { + return false; + } + return (0, import_service_error_classification.isRetryableByTrait)(error) || (0, import_service_error_classification.isClockSkewError)(error) || (0, import_service_error_classification.isThrottlingError)(error) || (0, import_service_error_classification.isTransientError)(error); +}, "defaultRetryDecider"); + +// src/util.ts +var asSdkError = /* @__PURE__ */ __name((error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}, "asSdkError"); + +// src/StandardRetryStrategy.ts +var _StandardRetryStrategy = class _StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = import_util_retry.RETRY_MODES.STANDARD; + this.retryDecider = (options == null ? void 0 : options.retryDecider) ?? defaultRetryDecider; + this.delayDecider = (options == null ? void 0 : options.delayDecider) ?? 
defaultDelayDecider; + this.retryQuota = (options == null ? void 0 : options.retryQuota) ?? getDefaultRetryQuota(import_util_retry.INITIAL_RETRY_TOKENS); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } catch (error) { + maxAttempts = import_util_retry.DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options == null ? void 0 : options.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options == null ? void 0 : options.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider( + (0, import_service_error_classification.isThrottlingError)(err) ? import_util_retry.THROTTLING_RETRY_DELAY_BASE : import_util_retry.DEFAULT_RETRY_DELAY_BASE, + attempts + ); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +}; +__name(_StandardRetryStrategy, "StandardRetryStrategy"); +var StandardRetryStrategy = _StandardRetryStrategy; +var getDelayFromRetryAfterHeader = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1e3; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}, "getDelayFromRetryAfterHeader"); + +// src/AdaptiveRetryStrategy.ts +var _AdaptiveRetryStrategy = class _AdaptiveRetryStrategy extends StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? 
new import_util_retry.DefaultRateLimiter(); + this.mode = import_util_retry.RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + } + }); + } +}; +__name(_AdaptiveRetryStrategy, "AdaptiveRetryStrategy"); +var AdaptiveRetryStrategy = _AdaptiveRetryStrategy; + +// src/configurations.ts +var import_util_middleware = __nccwpck_require__(56266); + +var ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +var CONFIG_MAX_ATTEMPTS = "max_attempts"; +var NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; }, - meta: { - get: ["GET /meta"], - getAllVersions: ["GET /versions"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"] + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; }, - migrations: { - cancelImport: [ - "DELETE /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + default: import_util_retry.DEFAULT_MAX_ATTEMPTS +}; +var resolveRetryConfig = /* @__PURE__ */ __name((input) => { + const { retryStrategy } = input; + const maxAttempts = (0, import_util_middleware.normalizeProvider)(input.maxAttempts ?? 
import_util_retry.DEFAULT_MAX_ATTEMPTS); + return { + ...input, + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await (0, import_util_middleware.normalizeProvider)(input.retryMode)(); + if (retryMode === import_util_retry.RETRY_MODES.ADAPTIVE) { + return new import_util_retry.AdaptiveRetryStrategy(maxAttempts); + } + return new import_util_retry.StandardRetryStrategy(maxAttempts); + } + }; +}, "resolveRetryConfig"); +var ENV_RETRY_MODE = "AWS_RETRY_MODE"; +var CONFIG_RETRY_MODE = "retry_mode"; +var NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: import_util_retry.DEFAULT_RETRY_MODE +}; + +// src/omitRetryHeadersMiddleware.ts + + +var omitRetryHeadersMiddleware = /* @__PURE__ */ __name(() => (next) => async (args) => { + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + delete request.headers[import_util_retry.INVOCATION_ID_HEADER]; + delete request.headers[import_util_retry.REQUEST_HEADER]; + } + return next(args); +}, "omitRetryHeadersMiddleware"); +var omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true +}; +var getOmitRetryHeadersPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + } +}), "getOmitRetryHeadersPlugin"); + +// src/retryMiddleware.ts + + +var import_smithy_client = __nccwpck_require__(97257); + + +var import_isStreamingPayload = __nccwpck_require__(10643); +var retryMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + var _a; + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = import_protocol_http.HttpRequest.isInstance(request); + if (isRequest) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (isRequest) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && (0, import_isStreamingPayload.isStreamingPayload)(request)) { + (_a = context.logger instanceof import_smithy_client.NoOpLogger ? console : context.logger) == null ? void 0 : _a.warn( + "An error was encountered in a non-retryable streaming request." 
+ ); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } else { + retryStrategy = retryStrategy; + if (retryStrategy == null ? void 0 : retryStrategy.mode) + context.userAgent = [...context.userAgent || [], ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}, "retryMiddleware"); +var isRetryStrategyV2 = /* @__PURE__ */ __name((retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && typeof retryStrategy.recordSuccess !== "undefined", "isRetryStrategyV2"); +var getRetryErrorInfo = /* @__PURE__ */ __name((error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error) + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}, "getRetryErrorInfo"); +var getRetryErrorType = /* @__PURE__ */ __name((error) => { + if ((0, import_service_error_classification.isThrottlingError)(error)) + return "THROTTLING"; + if ((0, import_service_error_classification.isTransientError)(error)) + return "TRANSIENT"; + if ((0, import_service_error_classification.isServerError)(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}, "getRetryErrorType"); +var retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true +}; +var getRetryPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + } +}), "getRetryPlugin"); +var getRetryAfterHint = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1e3); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}, "getRetryAfterHint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 10643: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isStreamingPayload = void 0; +const stream_1 = __nccwpck_require__(12781); +const isStreamingPayload = (request) => (request === null || request === void 0 ? void 0 : request.body) instanceof stream_1.Readable || + (typeof ReadableStream !== "undefined" && (request === null || request === void 0 ? 
void 0 : request.body) instanceof ReadableStream); +exports.isStreamingPayload = isStreamingPayload; + + +/***/ }), + +/***/ 33411: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + deserializerMiddleware: () => deserializerMiddleware, + deserializerMiddlewareOption: () => deserializerMiddlewareOption, + getSerdePlugin: () => getSerdePlugin, + serializerMiddleware: () => serializerMiddleware, + serializerMiddlewareOption: () => serializerMiddlewareOption +}); +module.exports = __toCommonJS(src_exports); + +// src/deserializerMiddleware.ts +var deserializerMiddleware = /* @__PURE__ */ __name((options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed + }; + } catch (error) { + Object.defineProperty(error, "$response", { + value: response + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + error.message += "\n " + hint; + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}, "deserializerMiddleware"); + +// src/serializerMiddleware.ts +var serializerMiddleware = /* @__PURE__ */ __name((options, serializer) => (next, context) => async (args) => { + var _a; + const endpoint = ((_a = context.endpointV2) == null ? void 0 : _a.url) && options.urlParser ? 
async () => options.urlParser(context.endpointV2.url) : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request + }); +}, "serializerMiddleware"); + +// src/serdePlugin.ts +var deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true +}; +var serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true +}; +function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + } + }; +} +__name(getSerdePlugin, "getSerdePlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 54317: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + constructStack: () => constructStack +}); +module.exports = __toCommonJS(src_exports); + +// src/MiddlewareStack.ts +var getAllAliases = /* @__PURE__ */ __name((name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}, "getAllAliases"); +var getMiddlewareNameWithAliases = /* @__PURE__ */ __name((name, aliases) => { + return `${name || "anonymous"}${aliases && aliases.length > 0 ? ` (a.k.a. 
${aliases.join(",")})` : ""}`; +}, "getMiddlewareNameWithAliases"); +var constructStack = /* @__PURE__ */ __name(() => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = /* @__PURE__ */ new Set(); + const sort = /* @__PURE__ */ __name((entries) => entries.sort( + (a, b) => stepWeights[b.step] - stepWeights[a.step] || priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"] + ), "sort"); + const removeByName = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByName"); + const removeByReference = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByReference"); + const cloneTo = /* @__PURE__ */ __name((toStack) => { + var _a; + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + (_a = toStack.identifyOnResolve) == null ? 
void 0 : _a.call(toStack, stack.identifyOnResolve()); + return toStack; + }, "cloneTo"); + const expandRelativeMiddlewareList = /* @__PURE__ */ __name((from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }, "expandRelativeMiddlewareList"); + const getMiddlewareList = /* @__PURE__ */ __name((debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === void 0) { + if (debug) { + return; + } + throw new Error( + `${entry.toMiddleware} is not found when adding ${getMiddlewareNameWithAliases(entry.name, entry.aliases)} middleware ${entry.relation} ${entry.toMiddleware}` + ); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = sort(normalizedAbsoluteEntries).map(expandRelativeMiddlewareList).reduce((wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, []); + return mainChain; + }, "getMiddlewareList"); + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex( + (entry2) => { + var _a; + return entry2.name === alias || ((_a = entry2.aliases) == null ? 
void 0 : _a.some((a) => a === alias)); + } + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ${toOverride.priority} priority in ${toOverride.step} step cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ${entry.priority} priority in ${entry.step} step.` + ); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = relativeEntries.findIndex( + (entry2) => { + var _a; + return entry2.name === alias || ((_a = entry2.aliases) == null ? void 0 : _a.some((a) => a === alias)); + } + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} "${entry.toMiddleware}" middleware.` + ); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + var _a; + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve( + identifyOnResolve || cloned.identifyOnResolve() || (((_a = from.identifyOnResolve) == null ? void 0 : _a.call(from)) ?? false) + ); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? 
mw.relation + " " + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList().map((entry) => entry.middleware).reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + } + }; + return stack; +}, "constructStack"); +var stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1 +}; +var priorityWeights = { + high: 3, + normal: 2, + low: 1 +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 66803: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + loadConfig: () => loadConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/configLoader.ts + + +// src/fromEnv.ts +var import_property_provider = __nccwpck_require__(79721); +var fromEnv = /* @__PURE__ */ __name((envVarSelector) => async () => { + try { + const config = envVarSelector(process.env); + if (config === void 0) { + throw new Error(); + } + return config; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Cannot load config from environment variables with getter: ${envVarSelector}` + ); + } +}, "fromEnv"); + +// src/fromSharedConfigFiles.ts + +var import_shared_ini_file_loader = __nccwpck_require__(43507); +var fromSharedConfigFiles = /* @__PURE__ */ __name((configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, import_shared_ini_file_loader.getProfileName)(init); + const { configFile, credentialsFile } = await (0, import_shared_ini_file_loader.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" ? { ...profileFromCredentials, ...profileFromConfig } : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? 
configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === void 0) { + throw new Error(); + } + return configValue; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Cannot load config for profile ${profile} in SDK configuration files with getter: ${configSelector}` + ); + } +}, "fromSharedConfigFiles"); + +// src/fromStatic.ts + +var isFunction = /* @__PURE__ */ __name((func) => typeof func === "function", "isFunction"); +var fromStatic = /* @__PURE__ */ __name((defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, import_property_provider.fromStatic)(defaultValue), "fromStatic"); + +// src/configLoader.ts +var loadConfig = /* @__PURE__ */ __name(({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + fromEnv(environmentVariableSelector), + fromSharedConfigFiles(configFileSelector, configuration), + fromStatic(defaultValue) + ) +), "loadConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 42012: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_REQUEST_TIMEOUT: () => DEFAULT_REQUEST_TIMEOUT, + NodeHttp2Handler: () => NodeHttp2Handler, + NodeHttpHandler: () => NodeHttpHandler, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/node-http-handler.ts +var import_protocol_http = __nccwpck_require__(51343); +var import_querystring_builder = __nccwpck_require__(68031); +var import_http = __nccwpck_require__(13685); +var import_https = __nccwpck_require__(95687); + +// src/constants.ts +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; + +// src/get-transformed-headers.ts +var getTransformedHeaders = /* @__PURE__ */ __name((headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + } + return transformedHeaders; +}, "getTransformedHeaders"); + +// src/set-connection-timeout.ts +var setConnectionTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return; + } + const timeoutId = setTimeout(() => { + request.destroy(); + reject( + Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError" + }) + ); + }, timeoutInMs); + request.on("socket", (socket) => { + if (socket.connecting) { + socket.on("connect", () => { + clearTimeout(timeoutId); + }); + } else { + clearTimeout(timeoutId); + } + }); +}, "setConnectionTimeout"); + +// src/set-socket-keep-alive.ts +var setSocketKeepAlive = /* @__PURE__ */ __name((request, { keepAlive, keepAliveMsecs }) => { + if (keepAlive !== true) { + return; + } + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); +}, "setSocketKeepAlive"); + +// src/set-socket-timeout.ts +var setSocketTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = 0) => { + request.setTimeout(timeoutInMs, () => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }); +}, "setSocketTimeout"); + +// src/write-request-body.ts +var import_stream = __nccwpck_require__(12781); +var MIN_WAIT_TIME = 1e3; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? 
{}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let hasError = false; + if (expect === "100-continue") { + await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(setTimeout(resolve, Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + clearTimeout(timeoutId); + resolve(); + }); + httpRequest.on("error", () => { + hasError = true; + clearTimeout(timeoutId); + resolve(); + }); + }) + ]); + } + if (!hasError) { + writeBody(httpRequest, request.body); + } +} +__name(writeRequestBody, "writeRequestBody"); +function writeBody(httpRequest, body) { + if (body instanceof import_stream.Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && uint8.buffer && typeof uint8.byteOffset === "number" && typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} +__name(writeBody, "writeBody"); + +// src/node-http-handler.ts +var DEFAULT_REQUEST_TIMEOUT = 0; +var _NodeHttpHandler = class _NodeHttpHandler { + constructor(options) { + this.socketWarningTimestamp = 0; + // Node http handler is hard-coded to http/1.1: https://github.com/nodejs/node/blob/ff5664b83b89c55e4ab5d5f60068fb457f1f5872/lib/_http_server.js#L286 + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options().then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }).catch(reject); + } else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof (instanceOrOptions == null ? void 0 : instanceOrOptions.handle) === "function") { + return instanceOrOptions; + } + return new _NodeHttpHandler(instanceOrOptions); + } + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @returns timestamp of last emitted warning. + */ + static checkSocketUsage(agent, socketWarningTimestamp) { + var _a, _b; + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15e3; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = ((_a = sockets[origin]) == null ? void 0 : _a.length) ?? 0; + const requestsEnqueued = ((_b = requests[origin]) == null ? void 0 : _b.length) ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + console.warn( + "@smithy/node-http-handler:WARN", + `socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued.`, + "See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html", + "or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config." 
+ ); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? socketTimeout, + httpAgent: (() => { + if (httpAgent instanceof import_http.Agent || typeof (httpAgent == null ? void 0 : httpAgent.destroy) === "function") { + return httpAgent; + } + return new import_http.Agent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof import_https.Agent || typeof (httpsAgent == null ? void 0 : httpsAgent.destroy) === "function") { + return httpsAgent; + } + return new import_https.Agent({ keepAlive, maxSockets, ...httpsAgent }); + })() + }; + } + destroy() { + var _a, _b, _c, _d; + (_b = (_a = this.config) == null ? void 0 : _a.httpAgent) == null ? void 0 : _b.destroy(); + (_d = (_c = this.config) == null ? void 0 : _c.httpsAgent) == null ? void 0 : _d.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + let socketCheckTimeoutId; + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = void 0; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + clearTimeout(socketCheckTimeoutId); + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }, "reject"); + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal == null ? void 0 : abortSignal.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + socketCheckTimeoutId = setTimeout(() => { + this.socketWarningTimestamp = _NodeHttpHandler.checkSocketUsage(agent, this.socketWarningTimestamp); + }, this.config.socketAcquisitionWarningTimeout ?? (this.config.requestTimeout ?? 2e3) + (this.config.connectionTimeout ?? 1e3)); + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + let auth = void 0; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const nodeHttpsOptions = { + headers: request.headers, + host: request.hostname, + method: request.method, + path, + port: request.port, + agent, + auth + }; + const requestFunc = isSSL ? 
import_https.request : import_http.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } else { + reject(err); + } + }); + setConnectionTimeout(req, reject, this.config.connectionTimeout); + setSocketTimeout(req, reject, this.config.requestTimeout); + if (abortSignal) { + abortSignal.onabort = () => { + req.abort(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + } + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + setSocketKeepAlive(req, { + // @ts-expect-error keepAlive is not public on httpAgent. + keepAlive: httpAgent.keepAlive, + // @ts-expect-error keepAliveMsecs is not public on httpAgent. + keepAliveMsecs: httpAgent.keepAliveMsecs + }); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch(_reject); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; +__name(_NodeHttpHandler, "NodeHttpHandler"); +var NodeHttpHandler = _NodeHttpHandler; + +// src/node-http2-handler.ts + + +var import_http22 = __nccwpck_require__(85158); + +// src/node-http2-connection-manager.ts +var import_http2 = __toESM(__nccwpck_require__(85158)); + +// src/node-http2-connection-pool.ts +var _NodeHttp2ConnectionPool = class _NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +}; +__name(_NodeHttp2ConnectionPool, "NodeHttp2ConnectionPool"); +var NodeHttp2ConnectionPool = _NodeHttp2ConnectionPool; + +// src/node-http2-connection-manager.ts +var _NodeHttp2ConnectionManager = class _NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = /* @__PURE__ */ new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = import_http2.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error( + "Fail to set maxConcurrentStreams to " + this.config.maxConcurrency + "when creating new session for " + requestContext.destination.toString() + ); + } + }); + } + session.unref(); + const destroySessionCb = /* @__PURE__ */ __name(() => { + session.destroy(); + this.deleteSession(url, session); + }, "destroySessionCb"); + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. + */ + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + var _a; + const cacheKey = this.getUrlString(requestContext); + (_a = this.sessionCache.get(cacheKey)) == null ? 
void 0 : _a.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +}; +__name(_NodeHttp2ConnectionManager, "NodeHttp2ConnectionManager"); +var NodeHttp2ConnectionManager = _NodeHttp2ConnectionManager; + +// src/node-http2-handler.ts +var _NodeHttp2Handler = class _NodeHttp2Handler { + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options().then((opts) => { + resolve(opts || {}); + }).catch(reject); + } else { + resolve(options || {}); + } + }); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof (instanceOrOptions == null ? void 0 : instanceOrOptions.handle) === "function") { + return instanceOrOptions; + } + return new _NodeHttp2Handler(instanceOrOptions); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + var _a; + let fulfilled = false; + let writeRequestBodyPromise = void 0; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }, "reject"); + if (abortSignal == null ? void 0 : abortSignal.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? `:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: (_a = this.config) == null ? 
void 0 : _a.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false + }); + const rejectWithDestroy = /* @__PURE__ */ __name((err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }, "rejectWithDestroy"); + const queryString = (0, import_querystring_builder.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [import_http22.constants.HTTP2_HEADER_PATH]: path, + [import_http22.constants.HTTP2_HEADER_METHOD]: method + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + abortSignal.onabort = () => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }; + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy( + new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`) + ); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + /** + * Destroys a session. + * @param session The session to destroy. 
+ */ + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +}; +__name(_NodeHttp2Handler, "NodeHttp2Handler"); +var NodeHttp2Handler = _NodeHttp2Handler; + +// src/stream-collector/collector.ts + +var _Collector = class _Collector extends import_stream.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +}; +__name(_Collector, "Collector"); +var Collector = _Collector; + +// src/stream-collector/index.ts +var streamCollector = /* @__PURE__ */ __name((stream) => new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function() { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); +}), "streamCollector"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 51343: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Field: () => Field, + Fields: () => Fields, + HttpRequest: () => HttpRequest, + HttpResponse: () => HttpResponse, + getHttpHandlerExtensionConfiguration: () => getHttpHandlerExtensionConfiguration, + isValidHostname: () => isValidHostname, + resolveHttpHandlerRuntimeConfig: () => resolveHttpHandlerRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/extensions/httpExtensionConfiguration.ts +var getHttpHandlerExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + let httpHandler = runtimeConfig.httpHandler; + return { + setHttpHandler(handler) { + httpHandler = handler; + }, + httpHandler() { + return httpHandler; + }, + updateHttpClientConfig(key, value) { + httpHandler.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return httpHandler.httpHandlerConfigs(); + } + }; +}, "getHttpHandlerExtensionConfiguration"); +var resolveHttpHandlerRuntimeConfig = /* @__PURE__ */ __name((httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler() + }; +}, "resolveHttpHandlerRuntimeConfig"); + +// src/Field.ts +var import_types = __nccwpck_require__(19801); +var _Field = class _Field { + constructor({ name, kind = import_types.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + /** + * Appends a value to the field. + * + * @param value The value to append. 
+ */ + add(value) { + this.values.push(value); + } + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values) { + this.values = values; + } + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString() { + return this.values.map((v) => v.includes(",") || v.includes(" ") ? `"${v}"` : v).join(", "); + } + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. + */ + get() { + return this.values; + } +}; +__name(_Field, "Field"); +var Field = _Field; + +// src/Fields.ts +var _Fields = class _Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name) { + return this.entries[name.toLowerCase()]; + } + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +}; +__name(_Fields, "Fields"); +var Fields = _Fields; + +// src/httpRequest.ts +var _HttpRequest = class _HttpRequest { + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol ? options.protocol.slice(-1) !== ":" ? `${options.protocol}:` : options.protocol : "https:"; + this.path = options.path ? options.path.charAt(0) !== "/" ? `/${options.path}` : options.path : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + static isInstance(request) { + if (!request) + return false; + const req = request; + return "method" in req && "protocol" in req && "hostname" in req && "path" in req && typeof req["query"] === "object" && typeof req["headers"] === "object"; + } + clone() { + const cloned = new _HttpRequest({ + ...this, + headers: { ...this.headers } + }); + if (cloned.query) + cloned.query = cloneQuery(cloned.query); + return cloned; + } +}; +__name(_HttpRequest, "HttpRequest"); +var HttpRequest = _HttpRequest; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param + }; + }, {}); +} +__name(cloneQuery, "cloneQuery"); + +// src/httpResponse.ts +var _HttpResponse = class _HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +}; +__name(_HttpResponse, "HttpResponse"); +var HttpResponse = _HttpResponse; + +// src/isValidHostname.ts +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +__name(isValidHostname, "isValidHostname"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 97257: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Client: () => Client, + Command: () => Command, + LazyJsonString: () => LazyJsonString, + NoOpLogger: () => NoOpLogger, + SENSITIVE_STRING: () => SENSITIVE_STRING, + ServiceException: () => ServiceException, + StringWrapper: () => StringWrapper, + _json: () => _json, + collectBody: () => collectBody, + convertMap: () => convertMap, + createAggregatedClient: () => createAggregatedClient, + dateToUtcString: () => dateToUtcString, + decorateServiceException: () => decorateServiceException, + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + expectBoolean: () => expectBoolean, + expectByte: () => expectByte, + expectFloat32: () => expectFloat32, + expectInt: () => expectInt, + expectInt32: () => expectInt32, + expectLong: () => expectLong, + expectNonNull: () => expectNonNull, + expectNumber: () => expectNumber, + expectObject: () => expectObject, + expectShort: () => expectShort, + expectString: () => expectString, + expectUnion: () => expectUnion, + extendedEncodeURIComponent: () => extendedEncodeURIComponent, + getArrayIfSingleItem: () => getArrayIfSingleItem, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + getDefaultExtensionConfiguration: () => getDefaultExtensionConfiguration, + getValueFromTextNode: () => getValueFromTextNode, + handleFloat: () => handleFloat, + limitedParseDouble: () => limitedParseDouble, + limitedParseFloat: () => limitedParseFloat, + limitedParseFloat32: () => limitedParseFloat32, + loadConfigsForDefaultMode: () => loadConfigsForDefaultMode, + logger: () => logger, + map: () => map, + parseBoolean: () => parseBoolean, + parseEpochTimestamp: () => 
parseEpochTimestamp, + parseRfc3339DateTime: () => parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset: () => parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime: () => parseRfc7231DateTime, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig, + resolvedPath: () => resolvedPath, + serializeFloat: () => serializeFloat, + splitEvery: () => splitEvery, + strictParseByte: () => strictParseByte, + strictParseDouble: () => strictParseDouble, + strictParseFloat: () => strictParseFloat, + strictParseFloat32: () => strictParseFloat32, + strictParseInt: () => strictParseInt, + strictParseInt32: () => strictParseInt32, + strictParseLong: () => strictParseLong, + strictParseShort: () => strictParseShort, + take: () => take, + throwDefaultError: () => throwDefaultError, + withBaseException: () => withBaseException +}); +module.exports = __toCommonJS(src_exports); + +// src/NoOpLogger.ts +var _NoOpLogger = class _NoOpLogger { + trace() { + } + debug() { + } + info() { + } + warn() { + } + error() { + } +}; +__name(_NoOpLogger, "NoOpLogger"); +var NoOpLogger = _NoOpLogger; + +// src/client.ts +var import_middleware_stack = __nccwpck_require__(54317); +var _Client = class _Client { + constructor(config) { + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + this.config = config; + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : void 0; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + if (callback) { + handler(command).then( + (result) => callback(null, result.output), + (err) => callback(err) + ).catch( + // prevent any errors thrown in the callback from triggering an + // unhandled promise rejection + () => { + } + ); + } else { + return handler(command).then((result) => result.output); + } + } + destroy() { + if (this.config.requestHandler.destroy) + this.config.requestHandler.destroy(); + } +}; +__name(_Client, "Client"); +var Client = _Client; + +// src/collect-stream-body.ts +var import_util_stream = __nccwpck_require__(21604); +var collectBody = /* @__PURE__ */ __name(async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return import_util_stream.Uint8ArrayBlobAdapter.mutate(await fromContext); +}, "collectBody"); + +// src/command.ts + +var import_types = __nccwpck_require__(19801); +var _Command = class _Command { + constructor() { + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + /** + * Factory for Command ClassBuilder. 
+ * @internal + */ + static classBuilder() { + return new ClassBuilder(); + } + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack, configuration, options, { + middlewareFn, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + smithyContext, + additionalContext, + CommandCtor + }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger: logger2 } = configuration; + const handlerExecutionContext = { + logger: logger2, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [import_types.SMITHY_CONTEXT_KEY]: { + ...smithyContext + }, + ...additionalContext + }; + const { requestHandler } = configuration; + return stack.resolve( + (request) => requestHandler.handle(request.request, options || {}), + handlerExecutionContext + ); + } +}; +__name(_Command, "Command"); +var Command = _Command; +var _ClassBuilder = class _ClassBuilder { + constructor() { + this._init = () => { + }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + /** + * Optional init callback. + */ + init(cb) { + this._init = cb; + } + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + /** + * Add any number of middleware. + */ + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + /** + * Set the initial handler execution context Smithy field. + */ + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext + }; + return this; + } + /** + * Set the initial handler execution context. + */ + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + /** + * Set constant string identifiers for the operation. + */ + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + return this; + } + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = outputFilter; + return this; + } + /** + * Sets the serializer. + */ + ser(serializer) { + this._serializer = serializer; + return this; + } + /** + * Sets the deserializer. + */ + de(deserializer) { + this._deserializer = deserializer; + return this; + } + /** + * @returns a Command class with the classBuilder properties. + */ + build() { + var _a; + const closure = this; + let CommandRef; + return CommandRef = (_a = class extends Command { + /** + * @public + */ + constructor(...[input]) { + super(); + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.serialize = closure._serializer; + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.deserialize = closure._deserializer; + this.input = input ?? 
{}; + closure._init(this); + } + /** + * @public + */ + static getEndpointParameterInstructions() { + return closure._ep; + } + /** + * @internal + */ + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext + }); + } + }, __name(_a, "CommandRef"), _a); + } +}; +__name(_ClassBuilder, "ClassBuilder"); +var ClassBuilder = _ClassBuilder; + +// src/constants.ts +var SENSITIVE_STRING = "***SensitiveInformation***"; + +// src/create-aggregated-client.ts +var createAggregatedClient = /* @__PURE__ */ __name((commands, Client2) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = /* @__PURE__ */ __name(async function(args, optionsOrCb, cb) { + const command2 = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command2, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command2, optionsOrCb || {}, cb); + } else { + return this.send(command2, optionsOrCb); + } + }, "methodImpl"); + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client2.prototype[methodName] = methodImpl; + } +}, "createAggregatedClient"); + +// src/parse-utils.ts +var parseBoolean = /* @__PURE__ */ __name((value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}, "parseBoolean"); +var expectBoolean = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}, "expectBoolean"); +var expectNumber = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}, "expectNumber"); +var MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +var expectFloat32 = /* @__PURE__ */ __name((value) => { + const expected = expectNumber(value); + if (expected !== void 0 && !Number.isNaN(expected) && expected !== Infinity && expected !== 
-Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}, "expectFloat32"); +var expectLong = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}, "expectLong"); +var expectInt = expectLong; +var expectInt32 = /* @__PURE__ */ __name((value) => expectSizedInt(value, 32), "expectInt32"); +var expectShort = /* @__PURE__ */ __name((value) => expectSizedInt(value, 16), "expectShort"); +var expectByte = /* @__PURE__ */ __name((value) => expectSizedInt(value, 8), "expectByte"); +var expectSizedInt = /* @__PURE__ */ __name((value, size) => { + const expected = expectLong(value); + if (expected !== void 0 && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}, "expectSizedInt"); +var castInt = /* @__PURE__ */ __name((value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}, "castInt"); +var expectNonNull = /* @__PURE__ */ __name((value, location) => { + if (value === null || value === void 0) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}, "expectNonNull"); +var expectObject = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? "array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}, "expectObject"); +var expectString = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}, "expectString"); +var expectUnion = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject).filter(([, v]) => v != null).map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. 
Keys ${setKeys} were not null.`); + } + return asObject; +}, "expectUnion"); +var strictParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}, "strictParseDouble"); +var strictParseFloat = strictParseDouble; +var strictParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}, "strictParseFloat32"); +var NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +var parseNumber = /* @__PURE__ */ __name((value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}, "parseNumber"); +var limitedParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}, "limitedParseDouble"); +var handleFloat = limitedParseDouble; +var limitedParseFloat = limitedParseDouble; +var limitedParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}, "limitedParseFloat32"); +var parseFloatString = /* @__PURE__ */ __name((value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}, "parseFloatString"); +var strictParseLong = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}, "strictParseLong"); +var strictParseInt = strictParseLong; +var strictParseInt32 = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectInt32(parseNumber(value)); + } + return expectInt32(value); +}, "strictParseInt32"); +var strictParseShort = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectShort(parseNumber(value)); + } + return expectShort(value); +}, "strictParseShort"); +var strictParseByte = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}, "strictParseByte"); +var stackTraceWarning = /* @__PURE__ */ __name((message) => { + return String(new TypeError(message).stack || message).split("\n").slice(0, 5).filter((s) => !s.includes("stackTraceWarning")).join("\n"); +}, "stackTraceWarning"); +var logger = { + warn: console.warn +}; + +// src/date-utils.ts +var DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +var MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? 
`0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +__name(dateToUtcString, "dateToUtcString"); +var RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +var parseRfc3339DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}, "parseRfc3339DateTime"); +var RFC3339_WITH_OFFSET = new RegExp( + /^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/ +); +var parseRfc3339DateTimeWithOffset = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}, "parseRfc3339DateTimeWithOffset"); +var IMF_FIXDATE = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var RFC_850_DATE = new RegExp( + /^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var ASC_TIME = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? 
(\d{4})$/ +); +var parseRfc7231DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr, "day", 1, 31), + { hours, minutes, seconds, fractionalMilliseconds } + ); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year( + buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds + }) + ); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr.trimLeft(), "day", 1, 31), + { hours, minutes, seconds, fractionalMilliseconds } + ); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}, "parseRfc7231DateTime"); +var parseEpochTimestamp = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1e3)); +}, "parseEpochTimestamp"); +var buildDate = /* @__PURE__ */ __name((year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date( + Date.UTC( + year, + adjustedMonth, + day, + parseDateValue(time.hours, "hour", 0, 23), + parseDateValue(time.minutes, "minute", 0, 59), + // seconds can go up to 60 for leap seconds + parseDateValue(time.seconds, "seconds", 0, 60), + parseMilliseconds(time.fractionalMilliseconds) + ) + ); +}, "buildDate"); +var parseTwoDigitYear = /* @__PURE__ */ __name((value) => { + const thisYear = (/* @__PURE__ */ new Date()).getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}, "parseTwoDigitYear"); +var FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1e3; +var adjustRfc850Year = /* @__PURE__ */ __name((input) => { + if (input.getTime() - (/* @__PURE__ */ new Date()).getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date( + Date.UTC( + input.getUTCFullYear() - 100, + input.getUTCMonth(), + input.getUTCDate(), + input.getUTCHours(), + input.getUTCMinutes(), + input.getUTCSeconds(), + input.getUTCMilliseconds() + ) + ); + } + return input; +}, "adjustRfc850Year"); +var parseMonthByShortName = /* @__PURE__ */ __name((value) => { + const monthIdx = MONTHS.indexOf(value); 
+ if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}, "parseMonthByShortName"); +var DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +var validateDayOfMonth = /* @__PURE__ */ __name((year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}, "validateDayOfMonth"); +var isLeapYear = /* @__PURE__ */ __name((year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}, "isLeapYear"); +var parseDateValue = /* @__PURE__ */ __name((value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}, "parseDateValue"); +var parseMilliseconds = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return 0; + } + return strictParseFloat32("0." + value) * 1e3; +}, "parseMilliseconds"); +var parseOffsetToMilliseconds = /* @__PURE__ */ __name((value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } else if (directionStr == "-") { + direction = -1; + } else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1e3; +}, "parseOffsetToMilliseconds"); +var stripLeadingZeroes = /* @__PURE__ */ __name((value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}, "stripLeadingZeroes"); + +// src/exceptions.ts +var _ServiceException = class _ServiceException extends Error { + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, _ServiceException.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } +}; +__name(_ServiceException, "ServiceException"); +var ServiceException = _ServiceException; +var decorateServiceException = /* @__PURE__ */ __name((exception, additions = {}) => { + Object.entries(additions).filter(([, v]) => v !== void 0).forEach(([k, v]) => { + if (exception[k] == void 0 || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}, "decorateServiceException"); + +// src/default-error-handler.ts +var throwDefaultError = /* @__PURE__ */ __name(({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : void 0; + const response = new exceptionCtor({ + name: (parsedBody == null ? void 0 : parsedBody.code) || (parsedBody == null ? 
void 0 : parsedBody.Code) || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata + }); + throw decorateServiceException(response, parsedBody); +}, "throwDefaultError"); +var withBaseException = /* @__PURE__ */ __name((ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}, "withBaseException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/defaults-mode.ts +var loadConfigsForDefaultMode = /* @__PURE__ */ __name((mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100 + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 3e4 + }; + default: + return {}; + } +}, "loadConfigsForDefaultMode"); + +// src/emitWarningIfUnsupportedVersion.ts +var warningEmitted = false; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 14) { + warningEmitted = true; + } +}, "emitWarningIfUnsupportedVersion"); + +// src/extensions/checksum.ts + +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in import_types.AlgorithmId) { + const algorithmId = import_types.AlgorithmId[id]; + if (runtimeConfig[algorithmId] === void 0) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId] + }); + } + return { + _checksumAlgorithms: checksumAlgorithms, + addChecksumAlgorithm(algo) { + this._checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return this._checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/retry.ts +var getRetryConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + let _retryStrategy = runtimeConfig.retryStrategy; + return { + setRetryStrategy(retryStrategy) { + _retryStrategy = retryStrategy; + }, + retryStrategy() { + return _retryStrategy; + } + }; +}, "getRetryConfiguration"); +var resolveRetryRuntimeConfig = /* @__PURE__ */ __name((retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}, "resolveRetryRuntimeConfig"); + +// src/extensions/defaultExtensionConfiguration.ts +var getDefaultExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + ...getChecksumConfiguration(runtimeConfig), + ...getRetryConfiguration(runtimeConfig) + }; +}, "getDefaultExtensionConfiguration"); +var getDefaultClientConfiguration = getDefaultExtensionConfiguration; +var 
resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + ...resolveChecksumRuntimeConfig(config), + ...resolveRetryRuntimeConfig(config) + }; +}, "resolveDefaultRuntimeConfig"); + +// src/extended-encode-uri-component.ts +function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +__name(extendedEncodeURIComponent, "extendedEncodeURIComponent"); + +// src/get-array-if-single-item.ts +var getArrayIfSingleItem = /* @__PURE__ */ __name((mayBeArray) => Array.isArray(mayBeArray) ? mayBeArray : [mayBeArray], "getArrayIfSingleItem"); + +// src/get-value-from-text-node.ts +var getValueFromTextNode = /* @__PURE__ */ __name((obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== void 0) { + obj[key] = obj[key][textNodeName]; + } else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}, "getValueFromTextNode"); + +// src/lazy-json.ts +var StringWrapper = /* @__PURE__ */ __name(function() { + const Class = Object.getPrototypeOf(this).constructor; + const Constructor = Function.bind.apply(String, [null, ...arguments]); + const instance = new Constructor(); + Object.setPrototypeOf(instance, Class.prototype); + return instance; +}, "StringWrapper"); +StringWrapper.prototype = Object.create(String.prototype, { + constructor: { + value: StringWrapper, + enumerable: false, + writable: true, + configurable: true + } +}); +Object.setPrototypeOf(StringWrapper, String); +var _LazyJsonString = class _LazyJsonString extends StringWrapper { + deserializeJSON() { + return JSON.parse(super.toString()); + } + toJSON() { + return super.toString(); + } + static fromObject(object) { + if (object instanceof _LazyJsonString) { + return object; + } else if (object instanceof String || typeof object === "string") { + return new _LazyJsonString(object); + } + return new _LazyJsonString(JSON.stringify(object)); + } +}; +__name(_LazyJsonString, "LazyJsonString"); +var LazyJsonString = _LazyJsonString; + +// src/object-mapping.ts +function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +__name(map, "map"); +var convertMap = /* @__PURE__ */ __name((target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}, "convertMap"); +var take = /* @__PURE__ */ __name((source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}, "take"); +var mapWithFilter = /* @__PURE__ */ __name((target, filter, instructions) => { + return map( + target, + Object.entries(instructions).reduce( + (_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } else { + if (typeof value === "function") { + _instructions[key] = 
[filter, value()]; + } else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, + {} + ) + ); +}, "mapWithFilter"); +var applyInstruction = /* @__PURE__ */ __name((target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter2 = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if (typeof filter2 === "function" && filter2(source[sourceKey]) || typeof filter2 !== "function" && !!filter2) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === void 0 && (_value = value()) != null; + const customFilterPassed = typeof filter === "function" && !!filter(void 0) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed) { + target[targetKey] = _value; + } else if (customFilterPassed) { + target[targetKey] = value(); + } + } else { + const defaultFilterPassed = filter === void 0 && value != null; + const customFilterPassed = typeof filter === "function" && !!filter(value) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}, "applyInstruction"); +var nonNullish = /* @__PURE__ */ __name((_) => _ != null, "nonNullish"); +var pass = /* @__PURE__ */ __name((_) => _, "pass"); + +// src/resolve-path.ts +var resolvedPath = /* @__PURE__ */ __name((resolvedPath2, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== void 0) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath2 = resolvedPath2.replace( + uriLabel, + isGreedyLabel ? 
labelValue.split("/").map((segment) => extendedEncodeURIComponent(segment)).join("/") : extendedEncodeURIComponent(labelValue) + ); + } else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath2; +}, "resolvedPath"); + +// src/ser-utils.ts +var serializeFloat = /* @__PURE__ */ __name((value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}, "serializeFloat"); + +// src/serde-json.ts +var _json = /* @__PURE__ */ __name((obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}, "_json"); + +// src/split-every.ts +function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} +__name(splitEvery, "splitEvery"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 19801: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AlgorithmId: () => AlgorithmId, + EndpointURLScheme: () => EndpointURLScheme, + FieldPosition: () => FieldPosition, + HttpApiKeyAuthLocation: () => HttpApiKeyAuthLocation, + HttpAuthLocation: () => HttpAuthLocation, + IniSectionType: () => IniSectionType, + RequestHandlerProtocol: () => RequestHandlerProtocol, + SMITHY_CONTEXT_KEY: () => SMITHY_CONTEXT_KEY, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/auth/auth.ts +var HttpAuthLocation = /* @__PURE__ */ ((HttpAuthLocation2) => { + HttpAuthLocation2["HEADER"] = "header"; + 
HttpAuthLocation2["QUERY"] = "query"; + return HttpAuthLocation2; +})(HttpAuthLocation || {}); + +// src/auth/HttpApiKeyAuth.ts +var HttpApiKeyAuthLocation = /* @__PURE__ */ ((HttpApiKeyAuthLocation2) => { + HttpApiKeyAuthLocation2["HEADER"] = "header"; + HttpApiKeyAuthLocation2["QUERY"] = "query"; + return HttpApiKeyAuthLocation2; +})(HttpApiKeyAuthLocation || {}); + +// src/endpoint.ts +var EndpointURLScheme = /* @__PURE__ */ ((EndpointURLScheme2) => { + EndpointURLScheme2["HTTP"] = "http"; + EndpointURLScheme2["HTTPS"] = "https"; + return EndpointURLScheme2; +})(EndpointURLScheme || {}); + +// src/extensions/checksum.ts +var AlgorithmId = /* @__PURE__ */ ((AlgorithmId2) => { + AlgorithmId2["MD5"] = "md5"; + AlgorithmId2["CRC32"] = "crc32"; + AlgorithmId2["CRC32C"] = "crc32c"; + AlgorithmId2["SHA1"] = "sha1"; + AlgorithmId2["SHA256"] = "sha256"; + return AlgorithmId2; +})(AlgorithmId || {}); +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== void 0) { + checksumAlgorithms.push({ + algorithmId: () => "sha256" /* SHA256 */, + checksumConstructor: () => runtimeConfig.sha256 + }); + } + if (runtimeConfig.md5 != void 0) { + checksumAlgorithms.push({ + algorithmId: () => "md5" /* MD5 */, + checksumConstructor: () => runtimeConfig.md5 + }); + } + return { + _checksumAlgorithms: checksumAlgorithms, + addChecksumAlgorithm(algo) { + this._checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return this._checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/defaultClientConfiguration.ts +var getDefaultClientConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + ...getChecksumConfiguration(runtimeConfig) + }; +}, "getDefaultClientConfiguration"); +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + ...resolveChecksumRuntimeConfig(config) + }; +}, "resolveDefaultRuntimeConfig"); + +// src/http.ts +var FieldPosition = /* @__PURE__ */ ((FieldPosition2) => { + FieldPosition2[FieldPosition2["HEADER"] = 0] = "HEADER"; + FieldPosition2[FieldPosition2["TRAILER"] = 1] = "TRAILER"; + return FieldPosition2; +})(FieldPosition || {}); + +// src/middleware.ts +var SMITHY_CONTEXT_KEY = "__smithy_context"; + +// src/profile.ts +var IniSectionType = /* @__PURE__ */ ((IniSectionType2) => { + IniSectionType2["PROFILE"] = "profile"; + IniSectionType2["SSO_SESSION"] = "sso-session"; + IniSectionType2["SERVICES"] = "services"; + return IniSectionType2; +})(IniSectionType || {}); + +// src/transfer.ts +var RequestHandlerProtocol = /* @__PURE__ */ ((RequestHandlerProtocol2) => { + RequestHandlerProtocol2["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol2["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol2["TDS_8_0"] = "tds/8.0"; + return RequestHandlerProtocol2; +})(RequestHandlerProtocol || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 38524: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = 
Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseUrl: () => parseUrl +}); +module.exports = __toCommonJS(src_exports); +var import_querystring_parser = __nccwpck_require__(4769); +var parseUrl = /* @__PURE__ */ __name((url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, import_querystring_parser.parseQueryString)(search); + } + return { + hostname, + port: port ? parseInt(port) : void 0, + protocol, + path: pathname, + query + }; +}, "parseUrl"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 14135: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(93726); +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); + } + const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; +exports.fromBase64 = fromBase64; + + +/***/ }), + +/***/ 54637: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, __nccwpck_require__(14135), module.exports); +__reExport(src_exports, __nccwpck_require__(65702), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 65702: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true 
})); +exports.toBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(93726); +const util_utf8_1 = __nccwpck_require__(92662); +const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = (0, util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; +exports.toBase64 = toBase64; + + +/***/ }), + +/***/ 93726: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = __nccwpck_require__(79074); +var import_buffer = __nccwpck_require__(14300); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 56266: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + getSmithyContext: () => getSmithyContext, + normalizeProvider: () => normalizeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var import_types = __nccwpck_require__(19801); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 79154: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + ConfiguredRetryStrategy: () => ConfiguredRetryStrategy, + DEFAULT_MAX_ATTEMPTS: () => DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_DELAY_BASE: () => DEFAULT_RETRY_DELAY_BASE, + DEFAULT_RETRY_MODE: () => DEFAULT_RETRY_MODE, + DefaultRateLimiter: () => DefaultRateLimiter, + INITIAL_RETRY_TOKENS: () => INITIAL_RETRY_TOKENS, + INVOCATION_ID_HEADER: () => INVOCATION_ID_HEADER, + MAXIMUM_RETRY_DELAY: () => MAXIMUM_RETRY_DELAY, + NO_RETRY_INCREMENT: () => NO_RETRY_INCREMENT, + REQUEST_HEADER: () => REQUEST_HEADER, + RETRY_COST: () => 
RETRY_COST, + RETRY_MODES: () => RETRY_MODES, + StandardRetryStrategy: () => StandardRetryStrategy, + THROTTLING_RETRY_DELAY_BASE: () => THROTTLING_RETRY_DELAY_BASE, + TIMEOUT_RETRY_COST: () => TIMEOUT_RETRY_COST +}); +module.exports = __toCommonJS(src_exports); + +// src/config.ts +var RETRY_MODES = /* @__PURE__ */ ((RETRY_MODES2) => { + RETRY_MODES2["STANDARD"] = "standard"; + RETRY_MODES2["ADAPTIVE"] = "adaptive"; + return RETRY_MODES2; +})(RETRY_MODES || {}); +var DEFAULT_MAX_ATTEMPTS = 3; +var DEFAULT_RETRY_MODE = "standard" /* STANDARD */; + +// src/DefaultRateLimiter.ts +var import_service_error_classification = __nccwpck_require__(6375); +var _DefaultRateLimiter = class _DefaultRateLimiter { + constructor(options) { + // Pre-set state variables + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = (options == null ? void 0 : options.beta) ?? 0.7; + this.minCapacity = (options == null ? void 0 : options.minCapacity) ?? 1; + this.minFillRate = (options == null ? void 0 : options.minFillRate) ?? 0.5; + this.scaleConstant = (options == null ? void 0 : options.scaleConstant) ?? 0.4; + this.smooth = (options == null ? void 0 : options.smooth) ?? 0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + getCurrentTimeInSeconds() { + return Date.now() / 1e3; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = (amount - this.currentCapacity) / this.fillRate * 1e3; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if ((0, import_service_error_classification.isThrottlingError)(response)) { + const rateToUse = !this.enabled ? 
this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow(this.lastMaxRate * (1 - this.beta) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise( + this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate + ); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +}; +__name(_DefaultRateLimiter, "DefaultRateLimiter"); +var DefaultRateLimiter = _DefaultRateLimiter; + +// src/constants.ts +var DEFAULT_RETRY_DELAY_BASE = 100; +var MAXIMUM_RETRY_DELAY = 20 * 1e3; +var THROTTLING_RETRY_DELAY_BASE = 500; +var INITIAL_RETRY_TOKENS = 500; +var RETRY_COST = 5; +var TIMEOUT_RETRY_COST = 10; +var NO_RETRY_INCREMENT = 1; +var INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +var REQUEST_HEADER = "amz-sdk-request"; + +// src/defaultRetryBackoffStrategy.ts +var getDefaultRetryBackoffStrategy = /* @__PURE__ */ __name(() => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = /* @__PURE__ */ __name((attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }, "computeNextBackoffDelay"); + const setDelayBase = /* @__PURE__ */ __name((delay) => { + delayBase = delay; + }, "setDelayBase"); + return { + computeNextBackoffDelay, + setDelayBase + }; +}, "getDefaultRetryBackoffStrategy"); + +// src/defaultRetryToken.ts +var createDefaultRetryToken = /* @__PURE__ */ __name(({ + retryDelay, + retryCount, + retryCost +}) => { + const getRetryCount = /* @__PURE__ */ __name(() => retryCount, "getRetryCount"); + const getRetryDelay = /* @__PURE__ */ __name(() => Math.min(MAXIMUM_RETRY_DELAY, retryDelay), "getRetryDelay"); + const getRetryCost = /* @__PURE__ */ __name(() => retryCost, "getRetryCost"); + return { + getRetryCount, + getRetryDelay, + getRetryCost + }; +}, "createDefaultRetryToken"); + +// src/StandardRetryStrategy.ts +var _StandardRetryStrategy = class _StandardRetryStrategy { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = "standard" /* STANDARD */; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof 
maxAttempts === "function" ? maxAttempts : async () => maxAttempts; + } + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0 + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase( + errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE + ); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? NO_RETRY_INCREMENT)); + } + /** + * @returns the current available retry capacity. + * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } catch (error) { + console.warn(`Max attempts provider could not resolve. Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return attempts < maxAttempts && this.capacity >= this.getCapacityCost(errorInfo.errorType) && this.isRetryableError(errorInfo.errorType); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +}; +__name(_StandardRetryStrategy, "StandardRetryStrategy"); +var StandardRetryStrategy = _StandardRetryStrategy; + +// src/AdaptiveRetryStrategy.ts +var _AdaptiveRetryStrategy = class _AdaptiveRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = "adaptive" /* ADAPTIVE */; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +}; +__name(_AdaptiveRetryStrategy, "AdaptiveRetryStrategy"); +var AdaptiveRetryStrategy = _AdaptiveRetryStrategy; + +// src/ConfiguredRetryStrategy.ts +var _ConfiguredRetryStrategy = class _ConfiguredRetryStrategy extends StandardRetryStrategy { + /** + * @param maxAttempts - the maximum number of retry attempts allowed. 
+ * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +}; +__name(_ConfiguredRetryStrategy, "ConfiguredRetryStrategy"); +var ConfiguredRetryStrategy = _ConfiguredRetryStrategy; +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 25698: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAwsChunkedEncodingStream = void 0; +const stream_1 = __nccwpck_require__(12781); +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; + + +/***/ }), + +/***/ 21604: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Uint8ArrayBlobAdapter: () => Uint8ArrayBlobAdapter +}); +module.exports = __toCommonJS(src_exports); + +// src/blob/transforms.ts +var import_util_base64 = __nccwpck_require__(54637); +var import_util_utf8 = __nccwpck_require__(92662); +function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return (0, import_util_base64.toBase64)(payload); + } + return (0, import_util_utf8.toUtf8)(payload); +} +__name(transformToString, "transformToString"); +function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate((0, import_util_base64.fromBase64)(str)); + } + return Uint8ArrayBlobAdapter.mutate((0, import_util_utf8.fromUtf8)(str)); +} +__name(transformFromString, "transformFromString"); + +// src/blob/Uint8ArrayBlobAdapter.ts +var _Uint8ArrayBlobAdapter = class _Uint8ArrayBlobAdapter extends Uint8Array { + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. 
+ */ + static mutate(source) { + Object.setPrototypeOf(source, _Uint8ArrayBlobAdapter.prototype); + return source; + } + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. + */ + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +}; +__name(_Uint8ArrayBlobAdapter, "Uint8ArrayBlobAdapter"); +var Uint8ArrayBlobAdapter = _Uint8ArrayBlobAdapter; + +// src/index.ts +__reExport(src_exports, __nccwpck_require__(25698), module.exports); +__reExport(src_exports, __nccwpck_require__(55789), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 55789: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.sdkStreamMixin = void 0; +const node_http_handler_1 = __nccwpck_require__(42012); +const util_buffer_from_1 = __nccwpck_require__(93726); +const stream_1 = __nccwpck_require__(12781); +const util_1 = __nccwpck_require__(73837); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!(stream instanceof stream_1.Readable)) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, node_http_handler_1.streamCollector)(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new util_1.TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof stream_1.Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please make sure you are using Node.js >= 17.0.0, or polyfill is available."); + } + transformed = true; + return stream_1.Readable.toWeb(stream); + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; + + +/***/ }), + +/***/ 92662: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = __nccwpck_require__(93726); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 7477: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: 
true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_MAX_RETRIES: () => DEFAULT_MAX_RETRIES, + DEFAULT_TIMEOUT: () => DEFAULT_TIMEOUT, + ENV_CMDS_AUTH_TOKEN: () => ENV_CMDS_AUTH_TOKEN, + ENV_CMDS_FULL_URI: () => ENV_CMDS_FULL_URI, + ENV_CMDS_RELATIVE_URI: () => ENV_CMDS_RELATIVE_URI, + Endpoint: () => Endpoint, + fromContainerMetadata: () => fromContainerMetadata, + fromInstanceMetadata: () => fromInstanceMetadata, + getInstanceMetadataEndpoint: () => getInstanceMetadataEndpoint, + httpRequest: () => httpRequest, + providerConfigFromInit: () => providerConfigFromInit +}); +module.exports = __toCommonJS(src_exports); + +// src/fromContainerMetadata.ts + +var import_url = __nccwpck_require__(57310); + +// src/remoteProvider/httpRequest.ts +var import_property_provider = __nccwpck_require__(79721); +var import_buffer = __nccwpck_require__(14300); +var import_http = __nccwpck_require__(13685); +function httpRequest(options) { + return new Promise((resolve, reject) => { + var _a; + const req = (0, import_http.request)({ + method: "GET", + ...options, + // Node.js http module doesn't accept hostname with square brackets + // Refs: https://github.com/nodejs/node/issues/39738 + hostname: (_a = options.hostname) == null ? void 0 : _a.replace(/^\[(.+)\]$/, "$1") + }); + req.on("error", (err) => { + reject(Object.assign(new import_property_provider.ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new import_property_provider.ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject( + Object.assign(new import_property_provider.ProviderError("Error response received from instance metadata service"), { statusCode }) + ); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(import_buffer.Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} +__name(httpRequest, "httpRequest"); + +// src/remoteProvider/ImdsCredentials.ts +var isImdsCredentials = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.AccessKeyId === "string" && typeof arg.SecretAccessKey === "string" && typeof arg.Token === "string" && typeof arg.Expiration === "string", "isImdsCredentials"); +var fromImdsCredentials = /* @__PURE__ */ __name((creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration) +}), "fromImdsCredentials"); + +// src/remoteProvider/RemoteProviderInit.ts +var DEFAULT_TIMEOUT = 1e3; +var DEFAULT_MAX_RETRIES = 0; +var providerConfigFromInit = /* @__PURE__ */ __name(({ + maxRetries = DEFAULT_MAX_RETRIES, + timeout = DEFAULT_TIMEOUT +}) => ({ maxRetries, timeout }), "providerConfigFromInit"); + +// src/remoteProvider/retry.ts +var retry = /* @__PURE__ */ __name((toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}, "retry"); + +// src/fromContainerMetadata.ts +var ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +var ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +var ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +var fromContainerMetadata = /* @__PURE__ */ __name((init = {}) => { + 
const { timeout, maxRetries } = providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri(); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service."); + } + return fromImdsCredentials(credsResponse); + }, maxRetries); +}, "fromContainerMetadata"); +var requestFromEcsImds = /* @__PURE__ */ __name(async (timeout, options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN] + }; + } + const buffer = await httpRequest({ + ...options, + timeout + }); + return buffer.toString(); +}, "requestFromEcsImds"); +var CMDS_IP = "169.254.170.2"; +var GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true +}; +var GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true +}; +var getCmdsUri = /* @__PURE__ */ __name(async () => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[ENV_CMDS_RELATIVE_URI] + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = (0, import_url.parse)(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new import_property_provider.CredentialsProviderError( + `${parsed.hostname} is not a valid container metadata service hostname`, + false + ); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new import_property_provider.CredentialsProviderError( + `${parsed.protocol} is not a valid container metadata service protocol`, + false + ); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : void 0 + }; + } + throw new import_property_provider.CredentialsProviderError( + `The container metadata credential provider cannot be used unless the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment variable is set`, + false + ); +}, "getCmdsUri"); + +// src/fromInstanceMetadata.ts + + + +// src/error/InstanceMetadataV1FallbackError.ts + +var _InstanceMetadataV1FallbackError = class _InstanceMetadataV1FallbackError extends import_property_provider.CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, _InstanceMetadataV1FallbackError.prototype); + } +}; +__name(_InstanceMetadataV1FallbackError, "InstanceMetadataV1FallbackError"); +var InstanceMetadataV1FallbackError = _InstanceMetadataV1FallbackError; + +// src/utils/getInstanceMetadataEndpoint.ts +var import_node_config_provider = __nccwpck_require__(67642); +var import_url_parser = __nccwpck_require__(48770); + +// src/config/Endpoint.ts +var Endpoint = /* @__PURE__ */ ((Endpoint2) => { + Endpoint2["IPv4"] = "http://169.254.169.254"; + Endpoint2["IPv6"] = "http://[fd00:ec2::254]"; + return Endpoint2; +})(Endpoint || {}); + +// src/config/EndpointConfigOptions.ts +var ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +var CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +var ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: void 0 +}; + +// src/config/EndpointMode.ts +var EndpointMode = /* @__PURE__ */ ((EndpointMode2) => { + EndpointMode2["IPv4"] = "IPv4"; + EndpointMode2["IPv6"] = "IPv6"; + return EndpointMode2; +})(EndpointMode || {}); + +// src/config/EndpointModeConfigOptions.ts +var ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +var CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +var ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: "IPv4" /* IPv4 */ +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var getInstanceMetadataEndpoint = /* @__PURE__ */ __name(async () => (0, import_url_parser.parseUrl)(await getFromEndpointConfig() || await getFromEndpointModeConfig()), "getInstanceMetadataEndpoint"); +var getFromEndpointConfig = /* @__PURE__ */ __name(async () => (0, import_node_config_provider.loadConfig)(ENDPOINT_CONFIG_OPTIONS)(), "getFromEndpointConfig"); +var getFromEndpointModeConfig = /* @__PURE__ */ __name(async () => { + const endpointMode = await (0, import_node_config_provider.loadConfig)(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch (endpointMode) { + case "IPv4" /* IPv4 */: + return "http://169.254.169.254" /* IPv4 */; + case "IPv6" /* IPv6 */: + return "http://[fd00:ec2::254]" /* IPv6 */; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}. 
Select from ${Object.values(EndpointMode)}`); + } +}, "getFromEndpointModeConfig"); + +// src/utils/getExtendedInstanceMetadataCredentials.ts +var STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +var STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +var STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +var getExtendedInstanceMetadataCredentials = /* @__PURE__ */ __name((credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1e3); + logger.warn( + `Attempting credential expiration extension due to a credential service availability issue. A refresh of these credentials will be attempted after ${new Date(newExpiration)}. +For more information, please visit: ` + STATIC_STABILITY_DOC_URL + ); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...originalExpiration ? { originalExpiration } : {}, + expiration: newExpiration + }; +}, "getExtendedInstanceMetadataCredentials"); + +// src/utils/staticStabilityProvider.ts +var staticStabilityProvider = /* @__PURE__ */ __name((provider, options = {}) => { + const logger = (options == null ? void 0 : options.logger) || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}, "staticStabilityProvider"); + +// src/fromInstanceMetadata.ts +var IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +var IMDS_TOKEN_PATH = "/latest/api/token"; +var AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +var PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +var X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +var fromInstanceMetadata = /* @__PURE__ */ __name((init = {}) => staticStabilityProvider(getInstanceImdsProvider(init), { logger: init.logger }), "fromInstanceMetadata"); +var getInstanceImdsProvider = /* @__PURE__ */ __name((init) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries } = providerConfigFromInit(init); + const getCredentials = /* @__PURE__ */ __name(async (maxRetries2, options) => { + var _a; + const isImdsV1Fallback = disableFetchToken || ((_a = options.headers) == null ? 
void 0 : _a[X_AWS_EC2_METADATA_TOKEN]) == null; + if (isImdsV1Fallback) { + let fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await (0, import_node_config_provider.loadConfig)( + { + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === void 0) { + throw new import_property_provider.CredentialsProviderError( + `${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.` + ); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile2) => { + const profileValue = profile2[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false + }, + { + profile + } + )(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError( + `AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join( + ", " + )}].` + ); + } + } + const imdsProfile = (await retry(async () => { + let profile2; + try { + profile2 = await getProfile(options); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile2; + }, maxRetries2)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries2); + }, "getCredentials"); + return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger == null ? void 0 : logger.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } catch (error) { + if ((error == null ? void 0 : error.statusCode) === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error" + }); + } else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger == null ? 
void 0 : logger.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token + }, + timeout + }); + } + }; +}, "getInstanceImdsProvider"); +var getMetadataToken = /* @__PURE__ */ __name(async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600" + } +}), "getMetadataToken"); +var getProfile = /* @__PURE__ */ __name(async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(), "getProfile"); +var getCredentialsFromProfile = /* @__PURE__ */ __name(async (profile, options) => { + const credsResponse = JSON.parse( + (await httpRequest({ + ...options, + path: IMDS_PATH + profile + })).toString() + ); + if (!isImdsCredentials(credsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service."); + } + return fromImdsCredentials(credsResponse); +}, "getCredentialsFromProfile"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 67642: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + loadConfig: () => loadConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/configLoader.ts + + +// src/fromEnv.ts +var import_property_provider = __nccwpck_require__(79721); +var fromEnv = /* @__PURE__ */ __name((envVarSelector) => async () => { + try { + const config = envVarSelector(process.env); + if (config === void 0) { + throw new Error(); + } + return config; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Cannot load config from environment variables with getter: ${envVarSelector}` + ); + } +}, "fromEnv"); + +// src/fromSharedConfigFiles.ts + +var import_shared_ini_file_loader = __nccwpck_require__(43507); +var fromSharedConfigFiles = /* @__PURE__ */ __name((configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, import_shared_ini_file_loader.getProfileName)(init); + const { configFile, credentialsFile } = await (0, import_shared_ini_file_loader.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" ? 
{ ...profileFromCredentials, ...profileFromConfig } : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === void 0) { + throw new Error(); + } + return configValue; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Cannot load config for profile ${profile} in SDK configuration files with getter: ${configSelector}` + ); + } +}, "fromSharedConfigFiles"); + +// src/fromStatic.ts + +var isFunction = /* @__PURE__ */ __name((func) => typeof func === "function", "isFunction"); +var fromStatic = /* @__PURE__ */ __name((defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, import_property_provider.fromStatic)(defaultValue), "fromStatic"); + +// src/configLoader.ts +var loadConfig = /* @__PURE__ */ __name(({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + fromEnv(environmentVariableSelector), + fromSharedConfigFiles(configFileSelector, configuration), + fromStatic(defaultValue) + ) +), "loadConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 48770: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseUrl: () => parseUrl +}); +module.exports = __toCommonJS(src_exports); +var import_querystring_parser = __nccwpck_require__(4769); +var parseUrl = /* @__PURE__ */ __name((url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, import_querystring_parser.parseQueryString)(search); + } + return { + hostname, + port: port ? 
parseInt(port) : void 0, + protocol, + path: pathname, + query + }; +}, "parseUrl"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 11014: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EventStreamCodec = void 0; +const crc32_1 = __nccwpck_require__(47327); +const HeaderMarshaller_1 = __nccwpck_require__(74712); +const splitMessage_1 = __nccwpck_require__(20597); +class EventStreamCodec { + constructor(toUtf8, fromUtf8) { + this.headerMarshaller = new HeaderMarshaller_1.HeaderMarshaller(toUtf8, fromUtf8); + this.messageBuffer = []; + this.isEndOfStream = false; + } + feed(message) { + this.messageBuffer.push(this.decode(message)); + } + endOfStream() { + this.isEndOfStream = true; + } + getMessage() { + const message = this.messageBuffer.pop(); + const isEndOfStream = this.isEndOfStream; + return { + getMessage() { + return message; + }, + isEndOfStream() { + return isEndOfStream; + }, + }; + } + getAvailableMessages() { + const messages = this.messageBuffer; + this.messageBuffer = []; + const isEndOfStream = this.isEndOfStream; + return { + getMessages() { + return messages; + }, + isEndOfStream() { + return isEndOfStream; + }, + }; + } + encode({ headers: rawHeaders, body }) { + const headers = this.headerMarshaller.format(rawHeaders); + const length = headers.byteLength + body.byteLength + 16; + const out = new Uint8Array(length); + const view = new DataView(out.buffer, out.byteOffset, out.byteLength); + const checksum = new crc32_1.Crc32(); + view.setUint32(0, length, false); + view.setUint32(4, headers.byteLength, false); + view.setUint32(8, checksum.update(out.subarray(0, 8)).digest(), false); + out.set(headers, 12); + out.set(body, headers.byteLength + 12); + view.setUint32(length - 4, checksum.update(out.subarray(8, length - 4)).digest(), false); + return out; + } + decode(message) { + const { headers, body } = (0, splitMessage_1.splitMessage)(message); + return { headers: this.headerMarshaller.parse(headers), body }; + } + formatHeaders(rawHeaders) { + return this.headerMarshaller.format(rawHeaders); + } +} +exports.EventStreamCodec = EventStreamCodec; + + +/***/ }), + +/***/ 74712: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HeaderMarshaller = void 0; +const util_hex_encoding_1 = __nccwpck_require__(45364); +const Int64_1 = __nccwpck_require__(46086); +class HeaderMarshaller { + constructor(toUtf8, fromUtf8) { + this.toUtf8 = toUtf8; + this.fromUtf8 = fromUtf8; + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = this.fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 
0 : 1]); + case "byte": + return Uint8Array.from([2, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = this.fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8; + tsBytes.set(Int64_1.Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9; + uuidBytes.set((0, util_hex_encoding_1.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } + parse(headers) { + const out = {}; + let position = 0; + while (position < headers.byteLength) { + const nameLength = headers.getUint8(position++); + const name = this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, nameLength)); + position += nameLength; + switch (headers.getUint8(position++)) { + case 0: + out[name] = { + type: BOOLEAN_TAG, + value: true, + }; + break; + case 1: + out[name] = { + type: BOOLEAN_TAG, + value: false, + }; + break; + case 2: + out[name] = { + type: BYTE_TAG, + value: headers.getInt8(position++), + }; + break; + case 3: + out[name] = { + type: SHORT_TAG, + value: headers.getInt16(position, false), + }; + position += 2; + break; + case 4: + out[name] = { + type: INT_TAG, + value: headers.getInt32(position, false), + }; + position += 4; + break; + case 5: + out[name] = { + type: LONG_TAG, + value: new Int64_1.Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)), + }; + position += 8; + break; + case 6: + const binaryLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: BINARY_TAG, + value: new Uint8Array(headers.buffer, headers.byteOffset + position, binaryLength), + }; + position += binaryLength; + break; + case 7: + const stringLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: STRING_TAG, + value: this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, stringLength)), + }; + position += stringLength; + break; + case 8: + out[name] = { + type: TIMESTAMP_TAG, + value: new Date(new Int64_1.Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)).valueOf()), + }; + position += 8; + break; + case 9: + const uuidBytes = new Uint8Array(headers.buffer, headers.byteOffset + position, 16); + position += 16; + out[name] = { + type: UUID_TAG, + value: `${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(0, 4))}-${(0, 
util_hex_encoding_1.toHex)(uuidBytes.subarray(4, 6))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(6, 8))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(8, 10))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(10))}`, + }; + break; + default: + throw new Error(`Unrecognized header type tag`); + } + } + return out; + } +} +exports.HeaderMarshaller = HeaderMarshaller; +var HEADER_VALUE_TYPE; +(function (HEADER_VALUE_TYPE) { + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolTrue"] = 0] = "boolTrue"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolFalse"] = 1] = "boolFalse"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byte"] = 2] = "byte"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["short"] = 3] = "short"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["integer"] = 4] = "integer"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["long"] = 5] = "long"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byteArray"] = 6] = "byteArray"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["string"] = 7] = "string"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["timestamp"] = 8] = "timestamp"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["uuid"] = 9] = "uuid"; +})(HEADER_VALUE_TYPE || (HEADER_VALUE_TYPE = {})); +const BOOLEAN_TAG = "boolean"; +const BYTE_TAG = "byte"; +const SHORT_TAG = "short"; +const INT_TAG = "integer"; +const LONG_TAG = "long"; +const BINARY_TAG = "binary"; +const STRING_TAG = "string"; +const TIMESTAMP_TAG = "timestamp"; +const UUID_TAG = "uuid"; +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; + + +/***/ }), + +/***/ 46086: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Int64 = void 0; +const util_hex_encoding_1 = __nccwpck_require__(45364); +class Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776000 || number < -9223372036854776000) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new Int64(bytes); + } + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 0b10000000; + if (negative) { + negate(bytes); + } + return parseInt((0, util_hex_encoding_1.toHex)(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +} +exports.Int64 = Int64; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 0xff; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} + + +/***/ }), + +/***/ 73684: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 57255: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MessageDecoderStream = void 0; +class MessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const bytes of this.options.inputStream) { + const decoded = this.options.decoder.decode(bytes); + yield decoded; + } + } +} +exports.MessageDecoderStream = MessageDecoderStream; + + +/***/ }), + +/***/ 52362: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MessageEncoderStream = void 0; +class MessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const msg of this.options.messageStream) { + const encoded = this.options.encoder.encode(msg); + yield encoded; + } + if (this.options.includeEndFrame) { + yield new Uint8Array(0); + } + } +} +exports.MessageEncoderStream = MessageEncoderStream; + + +/***/ }), + +/***/ 62379: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SmithyMessageDecoderStream = void 0; +class SmithyMessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const message of this.options.messageStream) { + const deserialized = await this.options.deserializer(message); + if (deserialized === undefined) + continue; + yield deserialized; + } + } +} +exports.SmithyMessageDecoderStream = SmithyMessageDecoderStream; + + +/***/ }), + +/***/ 12484: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SmithyMessageEncoderStream = void 0; +class SmithyMessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const chunk of this.options.inputStream) { + const payloadBuf = this.options.serializer(chunk); + yield payloadBuf; + } + } +} +exports.SmithyMessageEncoderStream = SmithyMessageEncoderStream; + + +/***/ }), + +/***/ 56459: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(11014), exports); +tslib_1.__exportStar(__nccwpck_require__(74712), exports); +tslib_1.__exportStar(__nccwpck_require__(46086), exports); +tslib_1.__exportStar(__nccwpck_require__(73684), exports); +tslib_1.__exportStar(__nccwpck_require__(57255), exports); +tslib_1.__exportStar(__nccwpck_require__(52362), exports); +tslib_1.__exportStar(__nccwpck_require__(62379), exports); 
+tslib_1.__exportStar(__nccwpck_require__(12484), exports); + + +/***/ }), + +/***/ 20597: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitMessage = void 0; +const crc32_1 = __nccwpck_require__(47327); +const PRELUDE_MEMBER_LENGTH = 4; +const PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; +const CHECKSUM_LENGTH = 4; +const MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; +function splitMessage({ byteLength, byteOffset, buffer }) { + if (byteLength < MINIMUM_MESSAGE_LENGTH) { + throw new Error("Provided message too short to accommodate event stream message overhead"); + } + const view = new DataView(buffer, byteOffset, byteLength); + const messageLength = view.getUint32(0, false); + if (byteLength !== messageLength) { + throw new Error("Reported message length does not match received message length"); + } + const headerLength = view.getUint32(PRELUDE_MEMBER_LENGTH, false); + const expectedPreludeChecksum = view.getUint32(PRELUDE_LENGTH, false); + const expectedMessageChecksum = view.getUint32(byteLength - CHECKSUM_LENGTH, false); + const checksummer = new crc32_1.Crc32().update(new Uint8Array(buffer, byteOffset, PRELUDE_LENGTH)); + if (expectedPreludeChecksum !== checksummer.digest()) { + throw new Error(`The prelude checksum specified in the message (${expectedPreludeChecksum}) does not match the calculated CRC32 checksum (${checksummer.digest()})`); + } + checksummer.update(new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH, byteLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH))); + if (expectedMessageChecksum !== checksummer.digest()) { + throw new Error(`The message checksum (${checksummer.digest()}) did not match the expected value of ${expectedMessageChecksum}`); + } + return { + headers: new DataView(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH, headerLength), + body: new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH + headerLength, messageLength - headerLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH + CHECKSUM_LENGTH)), + }; +} +exports.splitMessage = splitMessage; + + +/***/ }), + +/***/ 33193: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveEventStreamSerdeConfig = void 0; +const resolveEventStreamSerdeConfig = (input) => ({ + ...input, + eventStreamMarshaller: input.eventStreamSerdeProvider(input), +}); +exports.resolveEventStreamSerdeConfig = resolveEventStreamSerdeConfig; + + +/***/ }), + +/***/ 16181: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(33193), exports); + + +/***/ }), + +/***/ 76865: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EventStreamMarshaller = void 0; +const eventstream_serde_universal_1 = __nccwpck_require__(66673); +const stream_1 = __nccwpck_require__(12781); +const utils_1 = __nccwpck_require__(58047); +class EventStreamMarshaller { + constructor({ utf8Encoder, utf8Decoder }) { + this.universalMarshaller = new eventstream_serde_universal_1.EventStreamMarshaller({ + utf8Decoder, + utf8Encoder, + }); + } + deserialize(body, deserializer) { + const bodyIterable = typeof body[Symbol.asyncIterator] === 
"function" ? body : (0, utils_1.readabletoIterable)(body); + return this.universalMarshaller.deserialize(bodyIterable, deserializer); + } + serialize(input, serializer) { + return stream_1.Readable.from(this.universalMarshaller.serialize(input, serializer)); + } +} +exports.EventStreamMarshaller = EventStreamMarshaller; + + +/***/ }), + +/***/ 77682: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(76865), exports); +tslib_1.__exportStar(__nccwpck_require__(56887), exports); + + +/***/ }), + +/***/ 56887: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.eventStreamSerdeProvider = void 0; +const EventStreamMarshaller_1 = __nccwpck_require__(76865); +const eventStreamSerdeProvider = (options) => new EventStreamMarshaller_1.EventStreamMarshaller(options); +exports.eventStreamSerdeProvider = eventStreamSerdeProvider; + + +/***/ }), + +/***/ 58047: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.readabletoIterable = void 0; +async function* readabletoIterable(readStream) { + let streamEnded = false; + let generationEnded = false; + const records = new Array(); + readStream.on("error", (err) => { + if (!streamEnded) { + streamEnded = true; + } + if (err) { + throw err; + } + }); + readStream.on("data", (data) => { + records.push(data); + }); + readStream.on("end", () => { + streamEnded = true; + }); + while (!generationEnded) { + const value = await new Promise((resolve) => setTimeout(() => resolve(records.shift()), 0)); + if (value) { + yield value; + } + generationEnded = streamEnded && records.length === 0; + } +} +exports.readabletoIterable = readabletoIterable; + + +/***/ }), + +/***/ 66673: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EventStreamMarshaller: () => EventStreamMarshaller, + eventStreamSerdeProvider: () => eventStreamSerdeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/EventStreamMarshaller.ts +var import_eventstream_codec = __nccwpck_require__(23186); + +// src/getChunkedStream.ts +function getChunkedStream(source) { + let currentMessageTotalLength = 0; + let currentMessagePendingLength = 0; + let currentMessage = null; + let messageLengthBuffer = null; + const allocateMessage = /* @__PURE__ */ __name((size) => { + 
if (typeof size !== "number") { + throw new Error("Attempted to allocate an event message where size was not a number: " + size); + } + currentMessageTotalLength = size; + currentMessagePendingLength = 4; + currentMessage = new Uint8Array(size); + const currentMessageView = new DataView(currentMessage.buffer); + currentMessageView.setUint32(0, size, false); + }, "allocateMessage"); + const iterator = /* @__PURE__ */ __name(async function* () { + const sourceIterator = source[Symbol.asyncIterator](); + while (true) { + const { value, done } = await sourceIterator.next(); + if (done) { + if (!currentMessageTotalLength) { + return; + } else if (currentMessageTotalLength === currentMessagePendingLength) { + yield currentMessage; + } else { + throw new Error("Truncated event message received."); + } + return; } - ], - deleteArchiveForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/archive" - ], - deleteArchiveForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/archive" - ], - downloadArchiveForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}/archive" - ], - getArchiveForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/archive" - ], - getCommitAuthors: [ - "GET /repos/{owner}/{repo}/import/authors", - {}, - { - deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + const chunkLength = value.length; + let currentOffset = 0; + while (currentOffset < chunkLength) { + if (!currentMessage) { + const bytesRemaining = chunkLength - currentOffset; + if (!messageLengthBuffer) { + messageLengthBuffer = new Uint8Array(4); + } + const numBytesForTotal = Math.min( + 4 - currentMessagePendingLength, + // remaining bytes to fill the messageLengthBuffer + bytesRemaining + // bytes left in chunk + ); + messageLengthBuffer.set( + // @ts-ignore error TS2532: Object is possibly 'undefined' for value + value.slice(currentOffset, currentOffset + numBytesForTotal), + currentMessagePendingLength + ); + currentMessagePendingLength += numBytesForTotal; + currentOffset += numBytesForTotal; + if (currentMessagePendingLength < 4) { + break; + } + allocateMessage(new DataView(messageLengthBuffer.buffer).getUint32(0, false)); + messageLengthBuffer = null; + } + const numBytesToWrite = Math.min( + currentMessageTotalLength - currentMessagePendingLength, + // number of bytes left to complete message + chunkLength - currentOffset + // number of bytes left in the original chunk + ); + currentMessage.set( + // @ts-ignore error TS2532: Object is possibly 'undefined' for value + value.slice(currentOffset, currentOffset + numBytesToWrite), + currentMessagePendingLength + ); + currentMessagePendingLength += numBytesToWrite; + currentOffset += numBytesToWrite; + if (currentMessageTotalLength && currentMessageTotalLength === currentMessagePendingLength) { + yield currentMessage; + currentMessage = null; + currentMessageTotalLength = 0; + currentMessagePendingLength = 0; + } } - ], - getImportStatus: [ - "GET /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + }, "iterator"); + return { + [Symbol.asyncIterator]: iterator + }; +} +__name(getChunkedStream, "getChunkedStream"); + +// src/getUnmarshalledStream.ts +function getMessageUnmarshaller(deserializer, toUtf8) { + return async function(message) { + const { value: messageType } = 
message.headers[":message-type"]; + if (messageType === "error") { + const unmodeledError = new Error(message.headers[":error-message"].value || "UnknownError"); + unmodeledError.name = message.headers[":error-code"].value; + throw unmodeledError; + } else if (messageType === "exception") { + const code = message.headers[":exception-type"].value; + const exception = { [code]: message }; + const deserializedException = await deserializer(exception); + if (deserializedException.$unknown) { + const error = new Error(toUtf8(message.body)); + error.name = code; + throw error; } - ], - getLargeFiles: [ - "GET /repos/{owner}/{repo}/import/large_files", - {}, - { - deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + throw deserializedException[code]; + } else if (messageType === "event") { + const event = { + [message.headers[":event-type"].value]: message + }; + const deserialized = await deserializer(event); + if (deserialized.$unknown) + return; + return deserialized; + } else { + throw Error(`Unrecognizable event type: ${message.headers[":event-type"].value}`); + } + }; +} +__name(getMessageUnmarshaller, "getMessageUnmarshaller"); + +// src/EventStreamMarshaller.ts +var _EventStreamMarshaller = class _EventStreamMarshaller { + constructor({ utf8Encoder, utf8Decoder }) { + this.eventStreamCodec = new import_eventstream_codec.EventStreamCodec(utf8Encoder, utf8Decoder); + this.utfEncoder = utf8Encoder; + } + deserialize(body, deserializer) { + const inputStream = getChunkedStream(body); + return new import_eventstream_codec.SmithyMessageDecoderStream({ + messageStream: new import_eventstream_codec.MessageDecoderStream({ inputStream, decoder: this.eventStreamCodec }), + // @ts-expect-error Type 'T' is not assignable to type 'Record' + deserializer: getMessageUnmarshaller(deserializer, this.utfEncoder) + }); + } + serialize(inputStream, serializer) { + return new import_eventstream_codec.MessageEncoderStream({ + messageStream: new import_eventstream_codec.SmithyMessageEncoderStream({ inputStream, serializer }), + encoder: this.eventStreamCodec, + includeEndFrame: true + }); + } +}; +__name(_EventStreamMarshaller, "EventStreamMarshaller"); +var EventStreamMarshaller = _EventStreamMarshaller; + +// src/provider.ts +var eventStreamSerdeProvider = /* @__PURE__ */ __name((options) => new EventStreamMarshaller(options), "eventStreamSerdeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 23186: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var 
src_exports = {}; +__export(src_exports, { + EventStreamCodec: () => EventStreamCodec, + HeaderMarshaller: () => HeaderMarshaller, + Int64: () => Int64, + MessageDecoderStream: () => MessageDecoderStream, + MessageEncoderStream: () => MessageEncoderStream, + SmithyMessageDecoderStream: () => SmithyMessageDecoderStream, + SmithyMessageEncoderStream: () => SmithyMessageEncoderStream +}); +module.exports = __toCommonJS(src_exports); + +// src/EventStreamCodec.ts +var import_crc322 = __nccwpck_require__(47327); + +// src/HeaderMarshaller.ts + + +// src/Int64.ts +var import_util_hex_encoding = __nccwpck_require__(53804); +var _Int64 = class _Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776e3 || number < -9223372036854776e3) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new _Int64(bytes); + } + /** + * Called implicitly by infix arithmetic operators. + */ + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 128; + if (negative) { + negate(bytes); + } + return parseInt((0, import_util_hex_encoding.toHex)(bytes), 16) * (negative ? -1 : 1); + } + toString() { + return String(this.valueOf()); + } +}; +__name(_Int64, "Int64"); +var Int64 = _Int64; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 255; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} +__name(negate, "negate"); + +// src/HeaderMarshaller.ts +var _HeaderMarshaller = class _HeaderMarshaller { + constructor(toUtf8, fromUtf8) { + this.toUtf8 = toUtf8; + this.fromUtf8 = fromUtf8; + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = this.fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 
0 /* boolTrue */ : 1 /* boolFalse */]); + case "byte": + return Uint8Array.from([2 /* byte */, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3 /* short */); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4 /* integer */); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5 /* long */; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6 /* byteArray */); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = this.fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7 /* string */); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8 /* timestamp */; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9 /* uuid */; + uuidBytes.set((0, import_util_hex_encoding.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } + parse(headers) { + const out = {}; + let position = 0; + while (position < headers.byteLength) { + const nameLength = headers.getUint8(position++); + const name = this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, nameLength)); + position += nameLength; + switch (headers.getUint8(position++)) { + case 0 /* boolTrue */: + out[name] = { + type: BOOLEAN_TAG, + value: true + }; + break; + case 1 /* boolFalse */: + out[name] = { + type: BOOLEAN_TAG, + value: false + }; + break; + case 2 /* byte */: + out[name] = { + type: BYTE_TAG, + value: headers.getInt8(position++) + }; + break; + case 3 /* short */: + out[name] = { + type: SHORT_TAG, + value: headers.getInt16(position, false) + }; + position += 2; + break; + case 4 /* integer */: + out[name] = { + type: INT_TAG, + value: headers.getInt32(position, false) + }; + position += 4; + break; + case 5 /* long */: + out[name] = { + type: LONG_TAG, + value: new Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)) + }; + position += 8; + break; + case 6 /* byteArray */: + const binaryLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: BINARY_TAG, + value: new Uint8Array(headers.buffer, headers.byteOffset + position, binaryLength) + }; + position += binaryLength; + break; + case 7 /* string */: + const stringLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: STRING_TAG, + value: this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, stringLength)) + }; + position += stringLength; + break; + case 8 /* timestamp */: + out[name] = { + type: TIMESTAMP_TAG, + value: new Date(new Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)).valueOf()) + }; + position += 8; + break; + 
case 9 /* uuid */: + const uuidBytes = new Uint8Array(headers.buffer, headers.byteOffset + position, 16); + position += 16; + out[name] = { + type: UUID_TAG, + value: `${(0, import_util_hex_encoding.toHex)(uuidBytes.subarray(0, 4))}-${(0, import_util_hex_encoding.toHex)(uuidBytes.subarray(4, 6))}-${(0, import_util_hex_encoding.toHex)( + uuidBytes.subarray(6, 8) + )}-${(0, import_util_hex_encoding.toHex)(uuidBytes.subarray(8, 10))}-${(0, import_util_hex_encoding.toHex)(uuidBytes.subarray(10))}` + }; + break; + default: + throw new Error(`Unrecognized header type tag`); } - ], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], - listForAuthenticatedUser: ["GET /user/migrations"], - listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/repositories" - ], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: [ - "GET /user/migrations/{migration_id}/repositories", - {}, - { renamed: ["migrations", "listReposForAuthenticatedUser"] } - ], - mapCommitAuthor: [ - "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", - {}, - { - deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + return out; + } +}; +__name(_HeaderMarshaller, "HeaderMarshaller"); +var HeaderMarshaller = _HeaderMarshaller; +var BOOLEAN_TAG = "boolean"; +var BYTE_TAG = "byte"; +var SHORT_TAG = "short"; +var INT_TAG = "integer"; +var LONG_TAG = "long"; +var BINARY_TAG = "binary"; +var STRING_TAG = "string"; +var TIMESTAMP_TAG = "timestamp"; +var UUID_TAG = "uuid"; +var UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; + +// src/splitMessage.ts +var import_crc32 = __nccwpck_require__(47327); +var PRELUDE_MEMBER_LENGTH = 4; +var PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; +var CHECKSUM_LENGTH = 4; +var MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; +function splitMessage({ byteLength, byteOffset, buffer }) { + if (byteLength < MINIMUM_MESSAGE_LENGTH) { + throw new Error("Provided message too short to accommodate event stream message overhead"); + } + const view = new DataView(buffer, byteOffset, byteLength); + const messageLength = view.getUint32(0, false); + if (byteLength !== messageLength) { + throw new Error("Reported message length does not match received message length"); + } + const headerLength = view.getUint32(PRELUDE_MEMBER_LENGTH, false); + const expectedPreludeChecksum = view.getUint32(PRELUDE_LENGTH, false); + const expectedMessageChecksum = view.getUint32(byteLength - CHECKSUM_LENGTH, false); + const checksummer = new import_crc32.Crc32().update(new Uint8Array(buffer, byteOffset, PRELUDE_LENGTH)); + if (expectedPreludeChecksum !== checksummer.digest()) { + throw new Error( + `The prelude checksum specified in the message (${expectedPreludeChecksum}) does not match the calculated CRC32 checksum (${checksummer.digest()})` + ); + } + checksummer.update( + new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH, byteLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH)) + ); + if (expectedMessageChecksum !== checksummer.digest()) { + throw new Error( + `The message checksum (${checksummer.digest()}) did not match the expected value of ${expectedMessageChecksum}` + ); + } + return { + headers: new DataView(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH, headerLength), + body: new 
Uint8Array( + buffer, + byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH + headerLength, + messageLength - headerLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH + CHECKSUM_LENGTH) + ) + }; +} +__name(splitMessage, "splitMessage"); + +// src/EventStreamCodec.ts +var _EventStreamCodec = class _EventStreamCodec { + constructor(toUtf8, fromUtf8) { + this.headerMarshaller = new HeaderMarshaller(toUtf8, fromUtf8); + this.messageBuffer = []; + this.isEndOfStream = false; + } + feed(message) { + this.messageBuffer.push(this.decode(message)); + } + endOfStream() { + this.isEndOfStream = true; + } + getMessage() { + const message = this.messageBuffer.pop(); + const isEndOfStream = this.isEndOfStream; + return { + getMessage() { + return message; + }, + isEndOfStream() { + return isEndOfStream; + } + }; + } + getAvailableMessages() { + const messages = this.messageBuffer; + this.messageBuffer = []; + const isEndOfStream = this.isEndOfStream; + return { + getMessages() { + return messages; + }, + isEndOfStream() { + return isEndOfStream; + } + }; + } + /** + * Convert a structured JavaScript object with tagged headers into a binary + * event stream message. + */ + encode({ headers: rawHeaders, body }) { + const headers = this.headerMarshaller.format(rawHeaders); + const length = headers.byteLength + body.byteLength + 16; + const out = new Uint8Array(length); + const view = new DataView(out.buffer, out.byteOffset, out.byteLength); + const checksum = new import_crc322.Crc32(); + view.setUint32(0, length, false); + view.setUint32(4, headers.byteLength, false); + view.setUint32(8, checksum.update(out.subarray(0, 8)).digest(), false); + out.set(headers, 12); + out.set(body, headers.byteLength + 12); + view.setUint32(length - 4, checksum.update(out.subarray(8, length - 4)).digest(), false); + return out; + } + /** + * Convert a binary event stream message into a JavaScript object with an + * opaque, binary body and tagged, parsed headers. + */ + decode(message) { + const { headers, body } = splitMessage(message); + return { headers: this.headerMarshaller.parse(headers), body }; + } + /** + * Convert a structured JavaScript object with tagged headers into a binary + * event stream message header. 
+ */ + formatHeaders(rawHeaders) { + return this.headerMarshaller.format(rawHeaders); + } +}; +__name(_EventStreamCodec, "EventStreamCodec"); +var EventStreamCodec = _EventStreamCodec; + +// src/MessageDecoderStream.ts +var _MessageDecoderStream = class _MessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const bytes of this.options.inputStream) { + const decoded = this.options.decoder.decode(bytes); + yield decoded; + } + } +}; +__name(_MessageDecoderStream, "MessageDecoderStream"); +var MessageDecoderStream = _MessageDecoderStream; + +// src/MessageEncoderStream.ts +var _MessageEncoderStream = class _MessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const msg of this.options.messageStream) { + const encoded = this.options.encoder.encode(msg); + yield encoded; + } + if (this.options.includeEndFrame) { + yield new Uint8Array(0); + } + } +}; +__name(_MessageEncoderStream, "MessageEncoderStream"); +var MessageEncoderStream = _MessageEncoderStream; + +// src/SmithyMessageDecoderStream.ts +var _SmithyMessageDecoderStream = class _SmithyMessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const message of this.options.messageStream) { + const deserialized = await this.options.deserializer(message); + if (deserialized === void 0) + continue; + yield deserialized; + } + } +}; +__name(_SmithyMessageDecoderStream, "SmithyMessageDecoderStream"); +var SmithyMessageDecoderStream = _SmithyMessageDecoderStream; + +// src/SmithyMessageEncoderStream.ts +var _SmithyMessageEncoderStream = class _SmithyMessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const chunk of this.options.inputStream) { + const payloadBuf = this.options.serializer(chunk); + yield payloadBuf; + } + } +}; +__name(_SmithyMessageEncoderStream, "SmithyMessageEncoderStream"); +var SmithyMessageEncoderStream = _SmithyMessageEncoderStream; +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 53804: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromHex: () => fromHex, + toHex: () => toHex +}); +module.exports = __toCommonJS(src_exports); +var SHORT_TO_HEX = {}; +var HEX_TO_SHORT = 
{}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +__name(fromHex, "fromHex"); +function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} +__name(toHex, "toHex"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 3081: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Hash = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const util_utf8_1 = __nccwpck_require__(41895); +const buffer_1 = __nccwpck_require__(14300); +const crypto_1 = __nccwpck_require__(6113); +class Hash { + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update((0, util_utf8_1.toUint8Array)(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret + ? 
(0, crypto_1.createHmac)(this.algorithmIdentifier, castSourceData(this.secret)) + : (0, crypto_1.createHash)(this.algorithmIdentifier); + } +} +exports.Hash = Hash; +function castSourceData(toCast, encoding) { + if (buffer_1.Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return (0, util_buffer_from_1.fromString)(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return (0, util_buffer_from_1.fromArrayBuffer)(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return (0, util_buffer_from_1.fromArrayBuffer)(toCast); +} + + +/***/ }), + +/***/ 4671: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HashCalculator = void 0; +const util_utf8_1 = __nccwpck_require__(41895); +const stream_1 = __nccwpck_require__(12781); +class HashCalculator extends stream_1.Writable { + constructor(hash, options) { + super(options); + this.hash = hash; + } + _write(chunk, encoding, callback) { + try { + this.hash.update((0, util_utf8_1.toUint8Array)(chunk)); + } + catch (err) { + return callback(err); + } + callback(); + } +} +exports.HashCalculator = HashCalculator; + + +/***/ }), + +/***/ 80075: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fileStreamHasher = void 0; +const fs_1 = __nccwpck_require__(57147); +const HashCalculator_1 = __nccwpck_require__(4671); +const fileStreamHasher = (hashCtor, fileStream) => new Promise((resolve, reject) => { + if (!isReadStream(fileStream)) { + reject(new Error("Unable to calculate hash for non-file streams.")); + return; + } + const fileStreamTee = (0, fs_1.createReadStream)(fileStream.path, { + start: fileStream.start, + end: fileStream.end, + }); + const hash = new hashCtor(); + const hashCalculator = new HashCalculator_1.HashCalculator(hash); + fileStreamTee.pipe(hashCalculator); + fileStreamTee.on("error", (err) => { + hashCalculator.end(); + reject(err); + }); + hashCalculator.on("error", reject); + hashCalculator.on("finish", function () { + hash.digest().then(resolve).catch(reject); + }); +}); +exports.fileStreamHasher = fileStreamHasher; +const isReadStream = (stream) => typeof stream.path === "string"; + + +/***/ }), + +/***/ 48866: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(80075), exports); +tslib_1.__exportStar(__nccwpck_require__(87715), exports); + + +/***/ }), + +/***/ 87715: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.readableStreamHasher = void 0; +const HashCalculator_1 = __nccwpck_require__(4671); +const readableStreamHasher = (hashCtor, readableStream) => { + if (readableStream.readableFlowing !== null) { + throw new Error("Unable to calculate hash for flowing readable stream"); + } + const hash = new hashCtor(); + const hashCalculator = new HashCalculator_1.HashCalculator(hash); + readableStream.pipe(hashCalculator); + return new Promise((resolve, reject) => { + readableStream.on("error", (err) => { + hashCalculator.end(); + reject(err); + }); + hashCalculator.on("error", reject); + hashCalculator.on("finish", () => { + 
hash.digest().then(resolve).catch(reject); + }); + }); +}; +exports.readableStreamHasher = readableStreamHasher; + + +/***/ }), + +/***/ 10780: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isArrayBuffer = void 0; +const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; +exports.isArrayBuffer = isArrayBuffer; + + +/***/ }), + +/***/ 82800: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getContentLengthPlugin = exports.contentLengthMiddlewareOptions = exports.contentLengthMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const CONTENT_LENGTH_HEADER = "content-length"; +function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (protocol_http_1.HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && + Object.keys(headers) + .map((str) => str.toLowerCase()) + .indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length), + }; + } + catch (error) { + } + } + } + return next({ + ...args, + request, + }); + }; +} +exports.contentLengthMiddleware = contentLengthMiddleware; +exports.contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true, +}; +const getContentLengthPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), exports.contentLengthMiddlewareOptions); + }, +}); +exports.getContentLengthPlugin = getContentLengthPlugin; + + +/***/ }), + +/***/ 465: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createConfigValueProvider = void 0; +const createConfigValueProvider = (configKey, canonicalEndpointParamKey, config) => { + const configProvider = async () => { + var _a; + const configValue = (_a = config[configKey]) !== null && _a !== void 0 ? _a : config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }; + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? 
":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}; +exports.createConfigValueProvider = createConfigValueProvider; + + +/***/ }), + +/***/ 31518: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointFromConfig = void 0; +const node_config_provider_1 = __nccwpck_require__(33461); +const getEndpointUrlConfig_1 = __nccwpck_require__(7574); +const getEndpointFromConfig = async (serviceId) => (0, node_config_provider_1.loadConfig)((0, getEndpointUrlConfig_1.getEndpointUrlConfig)(serviceId))(); +exports.getEndpointFromConfig = getEndpointFromConfig; + + +/***/ }), + +/***/ 73929: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveParams = exports.getEndpointFromInstructions = void 0; +const service_customizations_1 = __nccwpck_require__(13105); +const createConfigValueProvider_1 = __nccwpck_require__(465); +const getEndpointFromConfig_1 = __nccwpck_require__(31518); +const toEndpointV1_1 = __nccwpck_require__(38938); +const getEndpointFromInstructions = async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + const endpointFromConfig = await (0, getEndpointFromConfig_1.getEndpointFromConfig)(clientConfig.serviceId || ""); + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve((0, toEndpointV1_1.toEndpointV1)(endpointFromConfig)); + } + } + const endpointParams = await (0, exports.resolveParams)(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}; +exports.getEndpointFromInstructions = getEndpointFromInstructions; +const resolveParams = async (commandInput, instructionsSupplier, clientConfig) => { + var _a; + const endpointParams = {}; + const instructions = ((_a = instructionsSupplier === null || instructionsSupplier === void 0 ? void 0 : instructionsSupplier.getEndpointParameterInstructions) === null || _a === void 0 ? 
void 0 : _a.call(instructionsSupplier)) || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await (0, createConfigValueProvider_1.createConfigValueProvider)(instruction.name, name, clientConfig)(); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await (0, service_customizations_1.resolveParamsForS3)(endpointParams); + } + return endpointParams; +}; +exports.resolveParams = resolveParams; + + +/***/ }), + +/***/ 7574: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointUrlConfig = void 0; +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = config[["services", profile.services].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); +exports.getEndpointUrlConfig = getEndpointUrlConfig; + + +/***/ }), + +/***/ 50890: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(73929), exports); +tslib_1.__exportStar(__nccwpck_require__(38938), exports); + + +/***/ }), + +/***/ 38938: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toEndpointV1 = void 0; +const url_parser_1 = __nccwpck_require__(14681); +const toEndpointV1 = (endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return (0, url_parser_1.parseUrl)(endpoint.url); + } + return endpoint; + } + return (0, url_parser_1.parseUrl)(endpoint); +}; +exports.toEndpointV1 = toEndpointV1; + + +/***/ }), + +/***/ 55520: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.endpointMiddleware = void 0; +const 
util_middleware_1 = __nccwpck_require__(2390); +const getEndpointFromInstructions_1 = __nccwpck_require__(73929); +const endpointMiddleware = ({ config, instructions, }) => { + return (next, context) => async (args) => { + var _a, _b, _c; + const endpoint = await (0, getEndpointFromInstructions_1.getEndpointFromInstructions)(args.input, { + getEndpointParameterInstructions() { + return instructions; + }, + }, { ...config }, context); + context.endpointV2 = endpoint; + context.authSchemes = (_a = endpoint.properties) === null || _a === void 0 ? void 0 : _a.authSchemes; + const authScheme = (_b = context.authSchemes) === null || _b === void 0 ? void 0 : _b[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = (0, util_middleware_1.getSmithyContext)(context); + const httpAuthOption = (_c = smithyContext === null || smithyContext === void 0 ? void 0 : smithyContext.selectedHttpAuthScheme) === null || _c === void 0 ? void 0 : _c.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign(httpAuthOption.signingProperties || {}, { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: authScheme.signingRegionSet, + }, authScheme.properties); + } + } + return next({ + ...args, + }); + }; +}; +exports.endpointMiddleware = endpointMiddleware; + + +/***/ }), + +/***/ 71329: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointPlugin = exports.endpointMiddlewareOptions = void 0; +const middleware_serde_1 = __nccwpck_require__(81238); +const endpointMiddleware_1 = __nccwpck_require__(55520); +exports.endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: middleware_serde_1.serializerMiddlewareOption.name, +}; +const getEndpointPlugin = (config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo((0, endpointMiddleware_1.endpointMiddleware)({ + config, + instructions, + }), exports.endpointMiddlewareOptions); + }, +}); +exports.getEndpointPlugin = getEndpointPlugin; + + +/***/ }), + +/***/ 82918: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(50890), exports); +tslib_1.__exportStar(__nccwpck_require__(55520), exports); +tslib_1.__exportStar(__nccwpck_require__(71329), exports); +tslib_1.__exportStar(__nccwpck_require__(74139), exports); +tslib_1.__exportStar(__nccwpck_require__(39720), exports); + + +/***/ }), + +/***/ 74139: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveEndpointConfig = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const toEndpointV1_1 = __nccwpck_require__(38938); +const resolveEndpointConfig = (input) => { + var _a, _b, _c; + const tls = (_a = input.tls) !== null && _a !== void 0 ? _a : true; + const { endpoint } = input; + const customEndpointProvider = endpoint != null ? 
async () => (0, toEndpointV1_1.toEndpointV1)(await (0, util_middleware_1.normalizeProvider)(endpoint)()) : undefined; + const isCustomEndpoint = !!endpoint; + return { + ...input, + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: (0, util_middleware_1.normalizeProvider)((_b = input.useDualstackEndpoint) !== null && _b !== void 0 ? _b : false), + useFipsEndpoint: (0, util_middleware_1.normalizeProvider)((_c = input.useFipsEndpoint) !== null && _c !== void 0 ? _c : false), + }; +}; +exports.resolveEndpointConfig = resolveEndpointConfig; + + +/***/ }), + +/***/ 13105: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(19194), exports); + + +/***/ }), + +/***/ 19194: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isArnBucketName = exports.isDnsCompatibleBucketName = exports.S3_HOSTNAME_PATTERN = exports.DOT_PATTERN = exports.resolveParamsForS3 = void 0; +const resolveParamsForS3 = async (endpointParams) => { + const bucket = (endpointParams === null || endpointParams === void 0 ? void 0 : endpointParams.Bucket) || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if ((0, exports.isArnBucketName)(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } + else if (!(0, exports.isDnsCompatibleBucketName)(bucket) || + (bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:")) || + bucket.toLowerCase() !== bucket || + bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}; +exports.resolveParamsForS3 = resolveParamsForS3; +const DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +const IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +const DOTS_PATTERN = /\.\./; +exports.DOT_PATTERN = /\./; +exports.S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; +const isDnsCompatibleBucketName = (bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName); +exports.isDnsCompatibleBucketName = isDnsCompatibleBucketName; +const isArnBucketName = (bucketName) => { + const [arn, partition, service, region, account, typeOrId] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = [arn, partition, service, account, typeOrId].filter(Boolean).length === 5; + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return arn === "arn" && !!partition && !!service && !!account && !!typeOrId; +}; +exports.isArnBucketName = isArnBucketName; + + +/***/ }), + +/***/ 39720: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 80155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); 
+exports.AdaptiveRetryStrategy = void 0; +const util_retry_1 = __nccwpck_require__(84902); +const StandardRetryStrategy_1 = __nccwpck_require__(94582); +class AdaptiveRetryStrategy extends StandardRetryStrategy_1.StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options !== null && options !== void 0 ? options : {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter !== null && rateLimiter !== void 0 ? rateLimiter : new util_retry_1.DefaultRateLimiter(); + this.mode = util_retry_1.RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + }, + }); + } +} +exports.AdaptiveRetryStrategy = AdaptiveRetryStrategy; + + +/***/ }), + +/***/ 94582: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StandardRetryStrategy = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const service_error_classification_1 = __nccwpck_require__(6375); +const util_retry_1 = __nccwpck_require__(84902); +const uuid_1 = __nccwpck_require__(7761); +const defaultRetryQuota_1 = __nccwpck_require__(29991); +const delayDecider_1 = __nccwpck_require__(27233); +const retryDecider_1 = __nccwpck_require__(67653); +const util_1 = __nccwpck_require__(42827); +class StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + var _a, _b, _c; + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = util_retry_1.RETRY_MODES.STANDARD; + this.retryDecider = (_a = options === null || options === void 0 ? void 0 : options.retryDecider) !== null && _a !== void 0 ? _a : retryDecider_1.defaultRetryDecider; + this.delayDecider = (_b = options === null || options === void 0 ? void 0 : options.delayDecider) !== null && _b !== void 0 ? _b : delayDecider_1.defaultDelayDecider; + this.retryQuota = (_c = options === null || options === void 0 ? void 0 : options.retryQuota) !== null && _c !== void 0 ? _c : (0, defaultRetryQuota_1.getDefaultRetryQuota)(util_retry_1.INITIAL_RETRY_TOKENS); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } + catch (error) { + maxAttempts = util_retry_1.DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.INVOCATION_ID_HEADER] = (0, uuid_1.v4)(); + } + while (true) { + try { + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options === null || options === void 0 ? void 0 : options.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options === null || options === void 0 ? 
void 0 : options.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } + catch (e) { + const err = (0, util_1.asSdkError)(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider((0, service_error_classification_1.isThrottlingError)(err) ? util_retry_1.THROTTLING_RETRY_DELAY_BASE : util_retry_1.DEFAULT_RETRY_DELAY_BASE, attempts); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +} +exports.StandardRetryStrategy = StandardRetryStrategy; +const getDelayFromRetryAfterHeader = (response) => { + if (!protocol_http_1.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1000; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}; + + +/***/ }), + +/***/ 58709: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_RETRY_MODE_CONFIG_OPTIONS = exports.CONFIG_RETRY_MODE = exports.ENV_RETRY_MODE = exports.resolveRetryConfig = exports.NODE_MAX_ATTEMPT_CONFIG_OPTIONS = exports.CONFIG_MAX_ATTEMPTS = exports.ENV_MAX_ATTEMPTS = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const util_retry_1 = __nccwpck_require__(84902); +exports.ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +exports.CONFIG_MAX_ATTEMPTS = "max_attempts"; +exports.NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[exports.ENV_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${exports.ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[exports.CONFIG_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${exports.CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + default: util_retry_1.DEFAULT_MAX_ATTEMPTS, +}; +const resolveRetryConfig = (input) => { + var _a; + const { retryStrategy } = input; + const maxAttempts = (0, util_middleware_1.normalizeProvider)((_a = input.maxAttempts) !== null && _a !== void 0 ? 
_a : util_retry_1.DEFAULT_MAX_ATTEMPTS); + return { + ...input, + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await (0, util_middleware_1.normalizeProvider)(input.retryMode)(); + if (retryMode === util_retry_1.RETRY_MODES.ADAPTIVE) { + return new util_retry_1.AdaptiveRetryStrategy(maxAttempts); + } + return new util_retry_1.StandardRetryStrategy(maxAttempts); + }, + }; +}; +exports.resolveRetryConfig = resolveRetryConfig; +exports.ENV_RETRY_MODE = "AWS_RETRY_MODE"; +exports.CONFIG_RETRY_MODE = "retry_mode"; +exports.NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[exports.ENV_RETRY_MODE], + configFileSelector: (profile) => profile[exports.CONFIG_RETRY_MODE], + default: util_retry_1.DEFAULT_RETRY_MODE, +}; + + +/***/ }), + +/***/ 29991: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDefaultRetryQuota = void 0; +const util_retry_1 = __nccwpck_require__(84902); +const getDefaultRetryQuota = (initialRetryTokens, options) => { + var _a, _b, _c; + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = (_a = options === null || options === void 0 ? void 0 : options.noRetryIncrement) !== null && _a !== void 0 ? _a : util_retry_1.NO_RETRY_INCREMENT; + const retryCost = (_b = options === null || options === void 0 ? void 0 : options.retryCost) !== null && _b !== void 0 ? _b : util_retry_1.RETRY_COST; + const timeoutRetryCost = (_c = options === null || options === void 0 ? void 0 : options.timeoutRetryCost) !== null && _c !== void 0 ? _c : util_retry_1.TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = (error) => (error.name === "TimeoutError" ? timeoutRetryCost : retryCost); + const hasRetryTokens = (error) => getCapacityAmount(error) <= availableCapacity; + const retrieveRetryTokens = (error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }; + const releaseRetryTokens = (capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount !== null && capacityReleaseAmount !== void 0 ? 
capacityReleaseAmount : noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }; + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens, + }); +}; +exports.getDefaultRetryQuota = getDefaultRetryQuota; + + +/***/ }), + +/***/ 27233: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultDelayDecider = void 0; +const util_retry_1 = __nccwpck_require__(84902); +const defaultDelayDecider = (delayBase, attempts) => Math.floor(Math.min(util_retry_1.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); +exports.defaultDelayDecider = defaultDelayDecider; + + +/***/ }), + +/***/ 96039: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(80155), exports); +tslib_1.__exportStar(__nccwpck_require__(94582), exports); +tslib_1.__exportStar(__nccwpck_require__(58709), exports); +tslib_1.__exportStar(__nccwpck_require__(27233), exports); +tslib_1.__exportStar(__nccwpck_require__(76556), exports); +tslib_1.__exportStar(__nccwpck_require__(67653), exports); +tslib_1.__exportStar(__nccwpck_require__(81434), exports); + + +/***/ }), + +/***/ 18977: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isStreamingPayload = void 0; +const stream_1 = __nccwpck_require__(12781); +const isStreamingPayload = (request) => (request === null || request === void 0 ? void 0 : request.body) instanceof stream_1.Readable || + (typeof ReadableStream !== "undefined" && (request === null || request === void 0 ? 
void 0 : request.body) instanceof ReadableStream); +exports.isStreamingPayload = isStreamingPayload; + + +/***/ }), + +/***/ 76556: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOmitRetryHeadersPlugin = exports.omitRetryHeadersMiddlewareOptions = exports.omitRetryHeadersMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const util_retry_1 = __nccwpck_require__(84902); +const omitRetryHeadersMiddleware = () => (next) => async (args) => { + const { request } = args; + if (protocol_http_1.HttpRequest.isInstance(request)) { + delete request.headers[util_retry_1.INVOCATION_ID_HEADER]; + delete request.headers[util_retry_1.REQUEST_HEADER]; + } + return next(args); +}; +exports.omitRetryHeadersMiddleware = omitRetryHeadersMiddleware; +exports.omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true, +}; +const getOmitRetryHeadersPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo((0, exports.omitRetryHeadersMiddleware)(), exports.omitRetryHeadersMiddlewareOptions); + }, +}); +exports.getOmitRetryHeadersPlugin = getOmitRetryHeadersPlugin; + + +/***/ }), + +/***/ 67653: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultRetryDecider = void 0; +const service_error_classification_1 = __nccwpck_require__(6375); +const defaultRetryDecider = (error) => { + if (!error) { + return false; + } + return (0, service_error_classification_1.isRetryableByTrait)(error) || (0, service_error_classification_1.isClockSkewError)(error) || (0, service_error_classification_1.isThrottlingError)(error) || (0, service_error_classification_1.isTransientError)(error); +}; +exports.defaultRetryDecider = defaultRetryDecider; + + +/***/ }), + +/***/ 81434: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryAfterHint = exports.getRetryPlugin = exports.retryMiddlewareOptions = exports.retryMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const service_error_classification_1 = __nccwpck_require__(6375); +const smithy_client_1 = __nccwpck_require__(63570); +const util_retry_1 = __nccwpck_require__(84902); +const uuid_1 = __nccwpck_require__(7761); +const isStreamingPayload_1 = __nccwpck_require__(18977); +const util_1 = __nccwpck_require__(42827); +const retryMiddleware = (options) => (next, context) => async (args) => { + var _a; + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = protocol_http_1.HttpRequest.isInstance(request); + if (isRequest) { + request.headers[util_retry_1.INVOCATION_ID_HEADER] = (0, uuid_1.v4)(); + } + while (true) { + try { + if (isRequest) { + request.headers[util_retry_1.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + 
retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } + catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = (0, util_1.asSdkError)(e); + if (isRequest && (0, isStreamingPayload_1.isStreamingPayload)(request)) { + (_a = (context.logger instanceof smithy_client_1.NoOpLogger ? console : context.logger)) === null || _a === void 0 ? void 0 : _a.warn("An error was encountered in a non-retryable streaming request."); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } + catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } + else { + retryStrategy = retryStrategy; + if (retryStrategy === null || retryStrategy === void 0 ? void 0 : retryStrategy.mode) + context.userAgent = [...(context.userAgent || []), ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}; +exports.retryMiddleware = retryMiddleware; +const isRetryStrategyV2 = (retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && + typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && + typeof retryStrategy.recordSuccess !== "undefined"; +const getRetryErrorInfo = (error) => { + const errorInfo = { + errorType: getRetryErrorType(error), + }; + const retryAfterHint = (0, exports.getRetryAfterHint)(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}; +const getRetryErrorType = (error) => { + if ((0, service_error_classification_1.isThrottlingError)(error)) + return "THROTTLING"; + if ((0, service_error_classification_1.isTransientError)(error)) + return "TRANSIENT"; + if ((0, service_error_classification_1.isServerError)(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}; +exports.retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true, +}; +const getRetryPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.retryMiddleware)(options), exports.retryMiddlewareOptions); + }, +}); +exports.getRetryPlugin = getRetryPlugin; +const getRetryAfterHint = (response) => { + if (!protocol_http_1.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1000); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}; +exports.getRetryAfterHint = getRetryAfterHint; + + +/***/ }), + +/***/ 42827: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.asSdkError = void 0; +const asSdkError = (error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if 
(typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}; +exports.asSdkError = asSdkError; + + +/***/ }), + +/***/ 7761: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(36310)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(9465)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(86001)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(38310)); + +var _nil = _interopRequireDefault(__nccwpck_require__(3436)); + +var _version = _interopRequireDefault(__nccwpck_require__(17780)); + +var _validate = _interopRequireDefault(__nccwpck_require__(66992)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(79618)); + +var _parse = _interopRequireDefault(__nccwpck_require__(40086)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 11380: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 3436: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 40086: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(66992)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 3194: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 68136: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 46679: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 79618: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(66992)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 36310: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(68136)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(79618)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? 
options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 9465: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2568)); + +var _md = _interopRequireDefault(__nccwpck_require__(11380)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2568: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(79618)); + +var _parse = _interopRequireDefault(__nccwpck_require__(40086)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; } - ], - setLfsPreference: [ - "PATCH /repos/{owner}/{repo}/import/lfs", - {}, - { - deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 86001: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(68136)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(79618)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 38310: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2568)); + +var _sha = _interopRequireDefault(__nccwpck_require__(46679)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 66992: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(3194)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 17780: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(66992)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 21595: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deserializerMiddleware = void 0; +const deserializerMiddleware = (options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed, + }; + } + catch (error) { + Object.defineProperty(error, "$response", { + value: response, + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + error.message += "\n " + hint; + } + throw error; + } +}; +exports.deserializerMiddleware = deserializerMiddleware; + + +/***/ }), + +/***/ 81238: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(21595), exports); +tslib_1.__exportStar(__nccwpck_require__(72338), exports); +tslib_1.__exportStar(__nccwpck_require__(23566), exports); + + +/***/ }), + +/***/ 72338: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSerdePlugin = exports.serializerMiddlewareOption = exports.deserializerMiddlewareOption = void 0; +const deserializerMiddleware_1 = __nccwpck_require__(21595); +const serializerMiddleware_1 = __nccwpck_require__(23566); +exports.deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true, +}; +exports.serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true, +}; +function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add((0, deserializerMiddleware_1.deserializerMiddleware)(config, deserializer), exports.deserializerMiddlewareOption); + commandStack.add((0, serializerMiddleware_1.serializerMiddleware)(config, serializer), 
exports.serializerMiddlewareOption); + }, + }; +} +exports.getSerdePlugin = getSerdePlugin; + + +/***/ }), + +/***/ 23566: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.serializerMiddleware = void 0; +const serializerMiddleware = (options, serializer) => (next, context) => async (args) => { + var _a; + const endpoint = ((_a = context.endpointV2) === null || _a === void 0 ? void 0 : _a.url) && options.urlParser + ? async () => options.urlParser(context.endpointV2.url) + : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request, + }); +}; +exports.serializerMiddleware = serializerMiddleware; + + +/***/ }), + +/***/ 2404: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.constructStack = void 0; +const getAllAliases = (name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}; +const getMiddlewareNameWithAliases = (name, aliases) => { + return `${name || "anonymous"}${aliases && aliases.length > 0 ? ` (a.k.a. ${aliases.join(",")})` : ""}`; +}; +const constructStack = () => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = new Set(); + const sort = (entries) => entries.sort((a, b) => stepWeights[b.step] - stepWeights[a.step] || + priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"]); + const removeByName = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const removeByReference = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const cloneTo = (toStack) => { + var _a; + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + (_a = toStack.identifyOnResolve) === null || _a === void 0 ? 
void 0 : _a.call(toStack, stack.identifyOnResolve()); + return toStack; + }; + const expandRelativeMiddlewareList = (from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }; + const getMiddlewareList = (debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === undefined) { + if (debug) { + return; + } + throw new Error(`${entry.toMiddleware} is not found when adding ` + + `${getMiddlewareNameWithAliases(entry.name, entry.aliases)} ` + + `middleware ${entry.relation} ${entry.toMiddleware}`); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = sort(normalizedAbsoluteEntries) + .map(expandRelativeMiddlewareList) + .reduce((wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, []); + return mainChain; + }; + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex((entry) => { var _a; return entry.name === alias || ((_a = entry.aliases) === null || _a === void 0 ? 
void 0 : _a.some((a) => a === alias)); }); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ` + + `${toOverride.priority} priority in ${toOverride.step} step cannot ` + + `be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ` + + `${entry.priority} priority in ${entry.step} step.`); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = relativeEntries.findIndex((entry) => { var _a; return entry.name === alias || ((_a = entry.aliases) === null || _a === void 0 ? void 0 : _a.some((a) => a === alias)); }); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ` + + `${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden ` + + `by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} ` + + `"${entry.toMiddleware}" middleware.`); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo((0, exports.constructStack)()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + var _a, _b; + const cloned = cloneTo((0, exports.constructStack)()); + cloned.use(from); + cloned.identifyOnResolve(identifyOnResolve || cloned.identifyOnResolve() || ((_b = (_a = from.identifyOnResolve) === null || _a === void 0 ? void 0 : _a.call(from)) !== null && _b !== void 0 ? _b : false)); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + var _a; + const step = (_a = mw.step) !== null && _a !== void 0 ? 
_a : mw.relation + + " " + + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList() + .map((entry) => entry.middleware) + .reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + }, + }; + return stack; +}; +exports.constructStack = constructStack; +const stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1, +}; +const priorityWeights = { + high: 3, + normal: 2, + low: 1, +}; + + +/***/ }), + +/***/ 97911: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(2404), exports); + + +/***/ }), + +/***/ 54766: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadConfig = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const fromEnv_1 = __nccwpck_require__(15606); +const fromSharedConfigFiles_1 = __nccwpck_require__(45784); +const fromStatic_1 = __nccwpck_require__(23091); +const loadConfig = ({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, property_provider_1.memoize)((0, property_provider_1.chain)((0, fromEnv_1.fromEnv)(environmentVariableSelector), (0, fromSharedConfigFiles_1.fromSharedConfigFiles)(configFileSelector, configuration), (0, fromStatic_1.fromStatic)(defaultValue))); +exports.loadConfig = loadConfig; + + +/***/ }), + +/***/ 15606: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromEnv = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const fromEnv = (envVarSelector) => async () => { + try { + const config = envVarSelector(process.env); + if (config === undefined) { + throw new Error(); + } + return config; + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(e.message || `Cannot load config from environment variables with getter: ${envVarSelector}`); + } +}; +exports.fromEnv = fromEnv; + + +/***/ }), + +/***/ 45784: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromSharedConfigFiles = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const fromSharedConfigFiles = (configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, shared_ini_file_loader_1.getProfileName)(init); + const { configFile, credentialsFile } = await (0, shared_ini_file_loader_1.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" + ? { ...profileFromCredentials, ...profileFromConfig } + : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? 
configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === undefined) { + throw new Error(); + } + return configValue; + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(e.message || `Cannot load config for profile ${profile} in SDK configuration files with getter: ${configSelector}`); + } +}; +exports.fromSharedConfigFiles = fromSharedConfigFiles; + + +/***/ }), + +/***/ 23091: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromStatic = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const isFunction = (func) => typeof func === "function"; +const fromStatic = (defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, property_provider_1.fromStatic)(defaultValue); +exports.fromStatic = fromStatic; + + +/***/ }), + +/***/ 33461: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(54766), exports); + + +/***/ }), + +/***/ 33946: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODEJS_TIMEOUT_ERROR_CODES = void 0; +exports.NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; + + +/***/ }), + +/***/ 70508: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getTransformedHeaders = void 0; +const getTransformedHeaders = (headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + } + return transformedHeaders; +}; +exports.getTransformedHeaders = getTransformedHeaders; + + +/***/ }), + +/***/ 20258: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(96948), exports); +tslib_1.__exportStar(__nccwpck_require__(46999), exports); +tslib_1.__exportStar(__nccwpck_require__(81030), exports); + + +/***/ }), + +/***/ 96948: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttpHandler = exports.DEFAULT_REQUEST_TIMEOUT = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const querystring_builder_1 = __nccwpck_require__(68031); +const http_1 = __nccwpck_require__(13685); +const https_1 = __nccwpck_require__(95687); +const constants_1 = __nccwpck_require__(33946); +const get_transformed_headers_1 = __nccwpck_require__(70508); +const set_connection_timeout_1 = __nccwpck_require__(25545); +const set_socket_keep_alive_1 = __nccwpck_require__(83751); +const set_socket_timeout_1 = __nccwpck_require__(42618); +const write_request_body_1 = __nccwpck_require__(73766); +exports.DEFAULT_REQUEST_TIMEOUT = 0; +class NodeHttpHandler { + static create(instanceOrOptions) { + if (typeof (instanceOrOptions === null || instanceOrOptions === void 0 ? 
void 0 : instanceOrOptions.handle) === "function") { + return instanceOrOptions; + } + return new NodeHttpHandler(instanceOrOptions); + } + constructor(options) { + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }) + .catch(reject); + } + else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout !== null && requestTimeout !== void 0 ? requestTimeout : socketTimeout, + httpAgent: httpAgent || new http_1.Agent({ keepAlive, maxSockets }), + httpsAgent: httpsAgent || new https_1.Agent({ keepAlive, maxSockets }), + }; + } + destroy() { + var _a, _b, _c, _d; + (_b = (_a = this.config) === null || _a === void 0 ? void 0 : _a.httpAgent) === null || _b === void 0 ? void 0 : _b.destroy(); + (_d = (_c = this.config) === null || _c === void 0 ? void 0 : _c.httpsAgent) === null || _d === void 0 ? void 0 : _d.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + var _a, _b; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const queryString = (0, querystring_builder_1.buildQueryString)(request.query || {}); + let auth = undefined; + if (request.username != null || request.password != null) { + const username = (_a = request.username) !== null && _a !== void 0 ? _a : ""; + const password = (_b = request.password) !== null && _b !== void 0 ? _b : ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const nodeHttpsOptions = { + headers: request.headers, + host: request.hostname, + method: request.method, + path, + port: request.port, + agent: isSSL ? this.config.httpsAgent : this.config.httpAgent, + auth, + }; + const requestFunc = isSSL ? 
https_1.request : http_1.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new protocol_http_1.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: (0, get_transformed_headers_1.getTransformedHeaders)(res.headers), + body: res, + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (constants_1.NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } + else { + reject(err); + } + }); + (0, set_connection_timeout_1.setConnectionTimeout)(req, reject, this.config.connectionTimeout); + (0, set_socket_timeout_1.setSocketTimeout)(req, reject, this.config.requestTimeout); + if (abortSignal) { + abortSignal.onabort = () => { + req.abort(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + } + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + (0, set_socket_keep_alive_1.setSocketKeepAlive)(req, { + keepAlive: httpAgent.keepAlive, + keepAliveMsecs: httpAgent.keepAliveMsecs, + }); + } + writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, this.config.requestTimeout).catch(_reject); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + var _a; + return (_a = this.config) !== null && _a !== void 0 ? _a : {}; + } +} +exports.NodeHttpHandler = NodeHttpHandler; + + +/***/ }), + +/***/ 5771: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2ConnectionManager = void 0; +const tslib_1 = __nccwpck_require__(4351); +const http2_1 = tslib_1.__importDefault(__nccwpck_require__(85158)); +const node_http2_connection_pool_1 = __nccwpck_require__(95157); +class NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = http2_1.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error("Fail to set maxConcurrentStreams to " + + this.config.maxConcurrency + + "when creating new session for " + + requestContext.destination.toString()); + } + }); + } + session.unref(); + const destroySessionCb = () => { + session.destroy(); + this.deleteSession(url, session); + }; + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new node_http2_connection_pool_1.NodeHttp2ConnectionPool(); + 
connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + var _a; + const cacheKey = this.getUrlString(requestContext); + (_a = this.sessionCache.get(cacheKey)) === null || _a === void 0 ? void 0 : _a.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +} +exports.NodeHttp2ConnectionManager = NodeHttp2ConnectionManager; + + +/***/ }), + +/***/ 95157: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2ConnectionPool = void 0; +class NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions !== null && sessions !== void 0 ? sessions : []; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +} +exports.NodeHttp2ConnectionPool = NodeHttp2ConnectionPool; + + +/***/ }), + +/***/ 46999: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2Handler = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const querystring_builder_1 = __nccwpck_require__(68031); +const http2_1 = __nccwpck_require__(85158); +const get_transformed_headers_1 = __nccwpck_require__(70508); +const node_http2_connection_manager_1 = __nccwpck_require__(5771); +const write_request_body_1 = __nccwpck_require__(73766); +class NodeHttp2Handler { + static create(instanceOrOptions) { + if (typeof (instanceOrOptions === null || instanceOrOptions === void 0 ? 
void 0 : instanceOrOptions.handle) === "function") { + return instanceOrOptions; + } + return new NodeHttp2Handler(instanceOrOptions); + } + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new node_http2_connection_manager_1.NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((opts) => { + resolve(opts || {}); + }) + .catch(reject); + } + else { + resolve(options || {}); + } + }); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + var _a, _b, _c; + let fulfilled = false; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = (_a = request.username) !== null && _a !== void 0 ? _a : ""; + const password = (_b = request.password) !== null && _b !== void 0 ? _b : ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? `:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: (_c = this.config) === null || _c === void 0 ? 
void 0 : _c.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false, + }); + const rejectWithDestroy = (err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }; + const queryString = (0, querystring_builder_1.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [http2_1.constants.HTTP2_HEADER_PATH]: path, + [http2_1.constants.HTTP2_HEADER_METHOD]: method, + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new protocol_http_1.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: (0, get_transformed_headers_1.getTransformedHeaders)(headers), + body: req, + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + abortSignal.onabort = () => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }; + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy(new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`)); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + var _a; + return (_a = this.config) !== null && _a !== void 0 ? 
_a : {}; + } + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +} +exports.NodeHttp2Handler = NodeHttp2Handler; + + +/***/ }), + +/***/ 25545: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setConnectionTimeout = void 0; +const setConnectionTimeout = (request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return; + } + const timeoutId = setTimeout(() => { + request.destroy(); + reject(Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError", + })); + }, timeoutInMs); + request.on("socket", (socket) => { + if (socket.connecting) { + socket.on("connect", () => { + clearTimeout(timeoutId); + }); + } + else { + clearTimeout(timeoutId); + } + }); +}; +exports.setConnectionTimeout = setConnectionTimeout; + + +/***/ }), + +/***/ 83751: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setSocketKeepAlive = void 0; +const setSocketKeepAlive = (request, { keepAlive, keepAliveMsecs }) => { + if (keepAlive !== true) { + return; + } + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); +}; +exports.setSocketKeepAlive = setSocketKeepAlive; + + +/***/ }), + +/***/ 42618: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setSocketTimeout = void 0; +const setSocketTimeout = (request, reject, timeoutInMs = 0) => { + request.setTimeout(timeoutInMs, () => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }); +}; +exports.setSocketTimeout = setSocketTimeout; + + +/***/ }), + +/***/ 23211: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Collector = void 0; +const stream_1 = __nccwpck_require__(12781); +class Collector extends stream_1.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +} +exports.Collector = Collector; + + +/***/ }), + +/***/ 81030: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.streamCollector = void 0; +const collector_1 = __nccwpck_require__(23211); +const streamCollector = (stream) => new Promise((resolve, reject) => { + const collector = new collector_1.Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); +}); +exports.streamCollector = streamCollector; + + +/***/ }), + +/***/ 73766: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.writeRequestBody = void 0; +const stream_1 = __nccwpck_require__(12781); +const MIN_WAIT_TIME = 1000; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + var _a; + const headers = (_a = request.headers) !== null && _a !== void 0 ? 
_a : {}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let hasError = false; + if (expect === "100-continue") { + await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(setTimeout(resolve, Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + clearTimeout(timeoutId); + resolve(); + }); + httpRequest.on("error", () => { + hasError = true; + clearTimeout(timeoutId); + resolve(); + }); + }), + ]); + } + if (!hasError) { + writeBody(httpRequest, request.body); + } +} +exports.writeRequestBody = writeRequestBody; +function writeBody(httpRequest, body) { + if (body instanceof stream_1.Readable) { + body.pipe(httpRequest); + } + else if (body) { + httpRequest.end(Buffer.from(body)); + } + else { + httpRequest.end(); + } +} + + +/***/ }), + +/***/ 79721: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CredentialsProviderError: () => CredentialsProviderError, + ProviderError: () => ProviderError, + TokenProviderError: () => TokenProviderError, + chain: () => chain, + fromStatic: () => fromStatic, + memoize: () => memoize +}); +module.exports = __toCommonJS(src_exports); + +// src/ProviderError.ts +var _ProviderError = class _ProviderError extends Error { + constructor(message, tryNextLink = true) { + super(message); + this.tryNextLink = tryNextLink; + this.name = "ProviderError"; + Object.setPrototypeOf(this, _ProviderError.prototype); + } + static from(error, tryNextLink = true) { + return Object.assign(new this(error.message, tryNextLink), error); + } +}; +__name(_ProviderError, "ProviderError"); +var ProviderError = _ProviderError; + +// src/CredentialsProviderError.ts +var _CredentialsProviderError = class _CredentialsProviderError extends ProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, _CredentialsProviderError.prototype); + } +}; +__name(_CredentialsProviderError, "CredentialsProviderError"); +var CredentialsProviderError = _CredentialsProviderError; + +// src/TokenProviderError.ts +var _TokenProviderError = class _TokenProviderError extends ProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, _TokenProviderError.prototype); + } +}; +__name(_TokenProviderError, "TokenProviderError"); +var TokenProviderError = _TokenProviderError; + +// src/chain.ts +var chain = /* @__PURE__ */ 
__name((...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } catch (err) { + lastProviderError = err; + if (err == null ? void 0 : err.tryNextLink) { + continue; } - ], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: [ - "PUT /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + throw err; + } + } + throw lastProviderError; +}, "chain"); + +// src/fromStatic.ts +var fromStatic = /* @__PURE__ */ __name((staticValue) => () => Promise.resolve(staticValue), "fromStatic"); + +// src/memoize.ts +var memoize = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || (options == null ? void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(); } - ], - unlockRepoForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" - ], - unlockRepoForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" - ], - updateImport: [ - "PATCH /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + return resolved; + }; + } + return async (options) => { + if (!hasResult || (options == null ? void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + return resolved; + } + return resolved; + }; +}, "memoize"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 89179: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Field = void 0; +const types_1 = __nccwpck_require__(55756); +class Field { + constructor({ name, kind = types_1.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + add(value) { + this.values.push(value); + } + set(values) { + this.values = values; + } + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + toString() { + return this.values.map((v) => (v.includes(",") || v.includes(" ") ? 
`"${v}"` : v)).join(", "); + } + get() { + return this.values; + } +} +exports.Field = Field; + + +/***/ }), + +/***/ 99242: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Fields = void 0; +class Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + getField(name) { + return this.entries[name.toLowerCase()]; + } + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +} +exports.Fields = Fields; + + +/***/ }), + +/***/ 22474: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveHttpHandlerRuntimeConfig = exports.getHttpHandlerExtensionConfiguration = void 0; +const getHttpHandlerExtensionConfiguration = (runtimeConfig) => { + let httpHandler = runtimeConfig.httpHandler; + return { + setHttpHandler(handler) { + httpHandler = handler; + }, + httpHandler() { + return httpHandler; + }, + updateHttpClientConfig(key, value) { + httpHandler.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return httpHandler.httpHandlerConfigs(); + }, + }; +}; +exports.getHttpHandlerExtensionConfiguration = getHttpHandlerExtensionConfiguration; +const resolveHttpHandlerRuntimeConfig = (httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler(), + }; +}; +exports.resolveHttpHandlerRuntimeConfig = resolveHttpHandlerRuntimeConfig; + + +/***/ }), + +/***/ 91654: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(22474), exports); + + +/***/ }), + +/***/ 63206: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 38746: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpRequest = void 0; +class HttpRequest { + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol + ? options.protocol.slice(-1) !== ":" + ? `${options.protocol}:` + : options.protocol + : "https:"; + this.path = options.path ? (options.path.charAt(0) !== "/" ? 
`/${options.path}` : options.path) : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + static isInstance(request) { + if (!request) + return false; + const req = request; + return ("method" in req && + "protocol" in req && + "hostname" in req && + "path" in req && + typeof req["query"] === "object" && + typeof req["headers"] === "object"); + } + clone() { + const cloned = new HttpRequest({ + ...this, + headers: { ...this.headers }, + }); + if (cloned.query) + cloned.query = cloneQuery(cloned.query); + return cloned; + } +} +exports.HttpRequest = HttpRequest; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? [...param] : param, + }; + }, {}); +} + + +/***/ }), + +/***/ 26322: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpResponse = void 0; +class HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +} +exports.HttpResponse = HttpResponse; + + +/***/ }), + +/***/ 64418: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(91654), exports); +tslib_1.__exportStar(__nccwpck_require__(89179), exports); +tslib_1.__exportStar(__nccwpck_require__(99242), exports); +tslib_1.__exportStar(__nccwpck_require__(63206), exports); +tslib_1.__exportStar(__nccwpck_require__(38746), exports); +tslib_1.__exportStar(__nccwpck_require__(26322), exports); +tslib_1.__exportStar(__nccwpck_require__(61466), exports); +tslib_1.__exportStar(__nccwpck_require__(19135), exports); + + +/***/ }), + +/***/ 61466: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isValidHostname = void 0; +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +exports.isValidHostname = isValidHostname; + + +/***/ }), + +/***/ 19135: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 68031: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || 
desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + buildQueryString: () => buildQueryString +}); +module.exports = __toCommonJS(src_exports); +var import_util_uri_escape = __nccwpck_require__(54197); +function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = (0, import_util_uri_escape.escapeUri)(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${(0, import_util_uri_escape.escapeUri)(value[i])}`); } - ] - }, - orgs: { - addSecurityManagerTeam: [ - "PUT /orgs/{org}/security-managers/teams/{team_slug}" - ], - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: [ - "PUT /orgs/{org}/outside_collaborators/{username}" - ], - createInvitation: ["POST /orgs/{org}/invitations"], - createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], - createOrUpdateCustomPropertiesValuesForRepos: [ - "PATCH /orgs/{org}/properties/values" - ], - createOrUpdateCustomProperty: [ - "PUT /orgs/{org}/properties/schema/{custom_property_name}" - ], - createWebhook: ["POST /orgs/{org}/hooks"], - delete: ["DELETE /orgs/{org}"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - enableOrDisableSecurityProductOnAllOrgRepos: [ - "POST /orgs/{org}/{security_product}/{enablement}" - ], - get: ["GET /orgs/{org}"], - getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], - getCustomProperty: [ - "GET /orgs/{org}/properties/schema/{custom_property_name}" - ], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: [ - "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" - ], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPatGrantRepositories: [ - "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" - ], - listPatGrantRequestRepositories: [ - "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" - ], - listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], - listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], - listWebhookDeliveries: ["GET 
/orgs/{org}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: [ - "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" - ], - removeCustomProperty: [ - "DELETE /orgs/{org}/properties/schema/{custom_property_name}" - ], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: [ - "DELETE /orgs/{org}/outside_collaborators/{username}" - ], - removePublicMembershipForAuthenticatedUser: [ - "DELETE /orgs/{org}/public_members/{username}" - ], - removeSecurityManagerTeam: [ - "DELETE /orgs/{org}/security-managers/teams/{team_slug}" - ], - reviewPatGrantRequest: [ - "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" - ], - reviewPatGrantRequestsInBulk: [ - "POST /orgs/{org}/personal-access-token-requests" - ], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: [ - "PUT /orgs/{org}/public_members/{username}" - ], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: [ - "PATCH /user/memberships/orgs/{org}" - ], - updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], - updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] - }, - packages: { - deletePackageForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}" - ], - deletePackageForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}" - ], - deletePackageForUser: [ - "DELETE /users/{username}/packages/{package_type}/{package_name}" - ], - deletePackageVersionForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - deletePackageVersionForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - deletePackageVersionForUser: [ - "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getAllPackageVersionsForAPackageOwnedByAnOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - {}, - { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } - ], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions", - {}, - { - renamed: [ - "packages", - "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" - ] + } else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${(0, import_util_uri_escape.escapeUri)(value)}`; } - ], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions" - ], - getAllPackageVersionsForPackageOwnedByOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" - ], - getAllPackageVersionsForPackageOwnedByUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions" - ], - getPackageForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}" - ], - getPackageForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}" - ], - getPackageForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}" - ], - 
getPackageVersionForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getPackageVersionForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getPackageVersionForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - listDockerMigrationConflictingPackagesForAuthenticatedUser: [ - "GET /user/docker/conflicts" - ], - listDockerMigrationConflictingPackagesForOrganization: [ - "GET /orgs/{org}/docker/conflicts" - ], - listDockerMigrationConflictingPackagesForUser: [ - "GET /users/{username}/docker/conflicts" - ], - listPackagesForAuthenticatedUser: ["GET /user/packages"], - listPackagesForOrganization: ["GET /orgs/{org}/packages"], - listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageForUser: [ - "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageVersionForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ], - restorePackageVersionForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ], - restorePackageVersionForUser: [ - "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], - createCard: ["POST /projects/columns/{column_id}/cards"], - createColumn: ["POST /projects/{project_id}/columns"], - createForAuthenticatedUser: ["POST /user/projects"], - createForOrg: ["POST /orgs/{org}/projects"], - createForRepo: ["POST /repos/{owner}/{repo}/projects"], - delete: ["DELETE /projects/{project_id}"], - deleteCard: ["DELETE /projects/columns/cards/{card_id}"], - deleteColumn: ["DELETE /projects/columns/{column_id}"], - get: ["GET /projects/{project_id}"], - getCard: ["GET /projects/columns/cards/{card_id}"], - getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: [ - "GET /projects/{project_id}/collaborators/{username}/permission" - ], - listCards: ["GET /projects/columns/{column_id}/cards"], - listCollaborators: ["GET /projects/{project_id}/collaborators"], - listColumns: ["GET /projects/{project_id}/columns"], - listForOrg: ["GET /orgs/{org}/projects"], - listForRepo: ["GET /repos/{owner}/{repo}/projects"], - listForUser: ["GET /users/{username}/projects"], - moveCard: ["POST /projects/columns/cards/{card_id}/moves"], - moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: [ - "DELETE /projects/{project_id}/collaborators/{username}" - ], - update: ["PATCH /projects/{project_id}"], - updateCard: ["PATCH /projects/columns/cards/{card_id}"], - updateColumn: ["PATCH /projects/columns/{column_id}"] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" - ], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" - ], - deletePendingReview: [ 
- "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - deleteReviewComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" - ], - dismissReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" - ], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" - ], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - listReviewComments: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" - ], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - requestReviewers: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - submitReview: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" - ], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" - ], - updateReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - updateReviewComment: [ - "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" - ] - }, - rateLimit: { get: ["GET /rate_limit"] }, - reactions: { - createForCommitComment: [ - "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" - ], - createForIssue: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" - ], - createForIssueComment: [ - "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" - ], - createForPullRequestReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" - ], - createForRelease: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" - ], - createForTeamDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" - ], - createForTeamDiscussionInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" - ], - deleteForCommitComment: [ - "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForIssue: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" - ], - deleteForIssueComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForPullRequestComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForRelease: [ - "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" - ], - deleteForTeamDiscussion: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" - ], - deleteForTeamDiscussionComment: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" - ], - listForCommitComment: [ 
- "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" - ], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: [ - "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" - ], - listForPullRequestReviewComment: [ - "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" - ], - listForRelease: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" - ], - listForTeamDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" - ], - listForTeamDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" - ] - }, - repos: { - acceptInvitation: [ - "PATCH /user/repository_invitations/{invitation_id}", - {}, - { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } - ], - acceptInvitationForAuthenticatedUser: [ - "PATCH /user/repository_invitations/{invitation_id}" - ], - addAppAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - addTeamAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - addUserAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - checkAutomatedSecurityFixes: [ - "GET /repos/{owner}/{repo}/automated-security-fixes" - ], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: [ - "GET /repos/{owner}/{repo}/vulnerability-alerts" - ], - codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: [ - "GET /repos/{owner}/{repo}/compare/{basehead}" - ], - createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: [ - "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" - ], - createCommitSignatureProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentBranchPolicy: [ - "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" - ], - createDeploymentProtectionRule: [ - "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" - ], - createDeploymentStatus: [ - "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" - ], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}" - ], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createOrgRuleset: ["POST /orgs/{org}/rulesets"], - createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages"], - createRelease: ["POST 
/repos/{owner}/{repo}/releases"], - createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], - createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: [ - "POST /repos/{template_owner}/{template_repo}/generate" - ], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: [ - "DELETE /user/repository_invitations/{invitation_id}", - {}, - { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } - ], - declineInvitationForAuthenticatedUser: [ - "DELETE /user/repository_invitations/{invitation_id}" - ], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" - ], - deleteAdminBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - deleteAnEnvironment: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}" - ], - deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" - ], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: [ - "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" - ], - deleteDeploymentBranchPolicy: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: [ - "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" - ], - deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: [ - "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" - ], - deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - deleteTagProtection: [ - "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" - ], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: [ - "DELETE /repos/{owner}/{repo}/automated-security-fixes" - ], - disableDeploymentProtectionRule: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" - ], - disablePrivateVulnerabilityReporting: [ - "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" - ], - disableVulnerabilityAlerts: [ - "DELETE /repos/{owner}/{repo}/vulnerability-alerts" - ], - downloadArchive: [ - "GET /repos/{owner}/{repo}/zipball/{ref}", - {}, - { renamed: ["repos", "downloadZipballArchive"] } - ], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: [ - "PUT /repos/{owner}/{repo}/automated-security-fixes" - ], - enablePrivateVulnerabilityReporting: [ - "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" - ], - enableVulnerabilityAlerts: [ - "PUT /repos/{owner}/{repo}/vulnerability-alerts" - ], - generateReleaseNotes: [ - "POST /repos/{owner}/{repo}/releases/generate-notes" - ], - get: ["GET 
/repos/{owner}/{repo}"], - getAccessRestrictions: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" - ], - getAdminBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - getAllDeploymentProtectionRules: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" - ], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" - ], - getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" - ], - getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection" - ], - getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: [ - "GET /repos/{owner}/{repo}/collaborators/{username}/permission" - ], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getCustomDeploymentProtectionRule: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" - ], - getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentBranchPolicy: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - getDeploymentStatus: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" - ], - getEnvironment: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}" - ], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], - getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], - getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], - getOrgRulesets: ["GET /orgs/{org}/rulesets"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET 
/repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getRepoRuleSuite: [ - "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" - ], - getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], - getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], - getStatusChecksProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - getTeamsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" - ], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" - ], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" - ], - getWebhookDelivery: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" - ], - listActivities: ["GET /repos/{owner}/{repo}/activity"], - listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" - ], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" - ], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: [ - "GET /repos/{owner}/{repo}/commits/{ref}/statuses" - ], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listCustomDeploymentRuleIntegrations: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" - ], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentBranchPolicies: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" - ], - listDeploymentStatuses: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" - ], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" - ], - listReleaseAssets: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/assets" - ], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" - ], - 
listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: [ - "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" - ], - removeAppAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - removeCollaborator: [ - "DELETE /repos/{owner}/{repo}/collaborators/{username}" - ], - removeStatusCheckContexts: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - removeStatusCheckProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - removeTeamAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - removeUserAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - setAppAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - setStatusCheckContexts: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - setTeamAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - setUserAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection" - ], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateDeploymentBranchPolicy: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: [ - "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" - ], - updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], - updatePullRequestReviewProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: [ - "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" - ], - updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - updateStatusCheckPotection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - {}, - { renamed: ["repos", "updateStatusCheckProtection"] } - ], - updateStatusCheckProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - 
updateWebhookConfigForRepo: [ - "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" - ], - uploadReleaseAsset: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", - { baseUrl: "https://uploads.github.com" } - ] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits"], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] - }, - secretScanning: { - getAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" - ], - listAlertsForEnterprise: [ - "GET /enterprises/{enterprise}/secret-scanning/alerts" - ], - listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" - ], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" - ] - }, - securityAdvisories: { - createPrivateVulnerabilityReport: [ - "POST /repos/{owner}/{repo}/security-advisories/reports" - ], - createRepositoryAdvisory: [ - "POST /repos/{owner}/{repo}/security-advisories" - ], - createRepositoryAdvisoryCveRequest: [ - "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" - ], - getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], - getRepositoryAdvisory: [ - "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" - ], - listGlobalAdvisories: ["GET /advisories"], - listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], - listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], - updateRepositoryAdvisory: [ - "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" - ] - }, - teams: { - addOrUpdateMembershipForUserInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - addOrUpdateProjectPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" - ], - addOrUpdateRepoPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - checkPermissionsForProjectInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" - ], - checkPermissionsForRepoInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" - ], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - deleteDiscussionInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - getDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - getMembershipForUserInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" - ], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - 
listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/invitations" - ], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - removeProjectInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" - ], - removeRepoInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - updateDiscussionCommentInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - updateDiscussionInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + parts.push(qsEntry); + } + } + return parts.join("&"); +} +__name(buildQueryString, "buildQueryString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 4769: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseQueryString: () => parseQueryString +}); +module.exports = __toCommonJS(src_exports); +function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } else if (Array.isArray(query[key])) { + query[key].push(value); + } else { + query[key] = [query[key], value]; + } + } + } + return query; +} +__name(parseQueryString, "parseQueryString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 6375: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], 
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isClockSkewCorrectedError: () => isClockSkewCorrectedError, + isClockSkewError: () => isClockSkewError, + isRetryableByTrait: () => isRetryableByTrait, + isServerError: () => isServerError, + isThrottlingError: () => isThrottlingError, + isTransientError: () => isTransientError +}); +module.exports = __toCommonJS(src_exports); + +// src/constants.ts +var CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch" +]; +var THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException" + // DynamoDB +]; +var TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +var TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; + +// src/index.ts +var isRetryableByTrait = /* @__PURE__ */ __name((error) => error.$retryable !== void 0, "isRetryableByTrait"); +var isClockSkewError = /* @__PURE__ */ __name((error) => CLOCK_SKEW_ERROR_CODES.includes(error.name), "isClockSkewError"); +var isClockSkewCorrectedError = /* @__PURE__ */ __name((error) => { + var _a; + return (_a = error.$metadata) == null ? void 0 : _a.clockSkewCorrected; +}, "isClockSkewCorrectedError"); +var isThrottlingError = /* @__PURE__ */ __name((error) => { + var _a, _b; + return ((_a = error.$metadata) == null ? void 0 : _a.httpStatusCode) === 429 || THROTTLING_ERROR_CODES.includes(error.name) || ((_b = error.$retryable) == null ? void 0 : _b.throttling) == true; +}, "isThrottlingError"); +var isTransientError = /* @__PURE__ */ __name((error) => { + var _a; + return isClockSkewCorrectedError(error) || TRANSIENT_ERROR_CODES.includes(error.name) || NODEJS_TIMEOUT_ERROR_CODES.includes((error == null ? void 0 : error.code) || "") || TRANSIENT_ERROR_STATUS_CODES.includes(((_a = error.$metadata) == null ? void 0 : _a.httpStatusCode) || 0); +}, "isTransientError"); +var isServerError = /* @__PURE__ */ __name((error) => { + var _a; + if (((_a = error.$metadata) == null ? 
void 0 : _a.httpStatusCode) !== void 0) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !isTransientError(error)) { + return true; + } + return false; + } + return false; +}, "isServerError"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 68340: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHomeDir = void 0; +const os_1 = __nccwpck_require__(22037); +const path_1 = __nccwpck_require__(71017); +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${path_1.sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + homeDirCache[homeDirCacheKey] = (0, os_1.homedir)(); + return homeDirCache[homeDirCacheKey]; +}; +exports.getHomeDir = getHomeDir; + + +/***/ }), + +/***/ 24740: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSSOTokenFilepath = void 0; +const crypto_1 = __nccwpck_require__(6113); +const path_1 = __nccwpck_require__(71017); +const getHomeDir_1 = __nccwpck_require__(68340); +const getSSOTokenFilepath = (id) => { + const hasher = (0, crypto_1.createHash)("sha1"); + const cacheName = hasher.update(id).digest("hex"); + return (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "sso", "cache", `${cacheName}.json`); +}; +exports.getSSOTokenFilepath = getSSOTokenFilepath; + + +/***/ }), + +/***/ 69678: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSSOTokenFromFile = void 0; +const fs_1 = __nccwpck_require__(57147); +const getSSOTokenFilepath_1 = __nccwpck_require__(24740); +const { readFile } = fs_1.promises; +const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = (0, getSSOTokenFilepath_1.getSSOTokenFilepath)(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; +exports.getSSOTokenFromFile = getSSOTokenFromFile; + + +/***/ }), + +/***/ 43507: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && 
__copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_PREFIX_SEPARATOR: () => CONFIG_PREFIX_SEPARATOR, + DEFAULT_PROFILE: () => DEFAULT_PROFILE, + ENV_PROFILE: () => ENV_PROFILE, + getProfileName: () => getProfileName, + loadSharedConfigFiles: () => loadSharedConfigFiles, + loadSsoSessionData: () => loadSsoSessionData, + parseKnownFiles: () => parseKnownFiles +}); +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, __nccwpck_require__(68340), module.exports); + +// src/getProfileName.ts +var ENV_PROFILE = "AWS_PROFILE"; +var DEFAULT_PROFILE = "default"; +var getProfileName = /* @__PURE__ */ __name((init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE, "getProfileName"); + +// src/index.ts +__reExport(src_exports, __nccwpck_require__(24740), module.exports); +__reExport(src_exports, __nccwpck_require__(69678), module.exports); + +// src/getConfigData.ts +var import_types = __nccwpck_require__(82370); +var getConfigData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if (indexOfSeparator === -1) { + return false; + } + return Object.values(import_types.IniSectionType).includes(key.substring(0, indexOfSeparator)); +}).reduce( + (acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === import_types.IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; }, - users: { - addEmailForAuthenticated: [ - "POST /user/emails", - {}, - { renamed: ["users", "addEmailForAuthenticatedUser"] } - ], - addEmailForAuthenticatedUser: ["POST /user/emails"], - addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: [ - "POST /user/gpg_keys", - {}, - { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } - ], - createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: [ - "POST /user/keys", - {}, - { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } - ], - createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], - deleteEmailForAuthenticated: [ - "DELETE /user/emails", - {}, - { renamed: ["users", "deleteEmailForAuthenticatedUser"] } - ], - deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: [ - "DELETE /user/gpg_keys/{gpg_key_id}", - {}, - { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } - ], - deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: [ - "DELETE /user/keys/{key_id}", - {}, - { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } - ], - deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], - deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], - deleteSshSigningKeyForAuthenticatedUser: [ - "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" - ], - follow: ["PUT /user/following/{username}"], - 
getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: [ - "GET /user/gpg_keys/{gpg_key_id}", - {}, - { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } - ], - getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: [ - "GET /user/keys/{key_id}", - {}, - { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } - ], - getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], - getSshSigningKeyForAuthenticatedUser: [ - "GET /user/ssh_signing_keys/{ssh_signing_key_id}" - ], - list: ["GET /users"], - listBlockedByAuthenticated: [ - "GET /user/blocks", - {}, - { renamed: ["users", "listBlockedByAuthenticatedUser"] } - ], - listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: [ - "GET /user/emails", - {}, - { renamed: ["users", "listEmailsForAuthenticatedUser"] } - ], - listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: [ - "GET /user/following", - {}, - { renamed: ["users", "listFollowedByAuthenticatedUser"] } - ], - listFollowedByAuthenticatedUser: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: [ - "GET /user/gpg_keys", - {}, - { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } - ], - listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: [ - "GET /user/public_emails", - {}, - { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } - ], - listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: [ - "GET /user/keys", - {}, - { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } - ], - listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], - listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], - listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], - listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], - setPrimaryEmailVisibilityForAuthenticated: [ - "PATCH /user/email/visibility", - {}, - { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } - ], - setPrimaryEmailVisibilityForAuthenticatedUser: [ - "PATCH /user/email/visibility" - ], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] + { + // Populate default profile, if present. 
+ ...data.default && { default: data.default } + } +), "getConfigData"); + +// src/getConfigFilepath.ts +var import_path = __nccwpck_require__(71017); +var import_getHomeDir = __nccwpck_require__(68340); +var ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +var getConfigFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CONFIG_PATH] || (0, import_path.join)((0, import_getHomeDir.getHomeDir)(), ".aws", "config"), "getConfigFilepath"); + +// src/getCredentialsFilepath.ts + +var import_getHomeDir2 = __nccwpck_require__(68340); +var ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +var getCredentialsFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CREDENTIALS_PATH] || (0, import_path.join)((0, import_getHomeDir2.getHomeDir)(), ".aws", "credentials"), "getCredentialsFilepath"); + +// src/parseIni.ts + +var prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +var profileNameBlockList = ["__proto__", "profile __proto__"]; +var parseIni = /* @__PURE__ */ __name((iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = void 0; + currentSubSection = void 0; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , name] = matches; + if (Object.values(import_types.IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, -1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim() + ]; + if (value === "") { + currentSubSection = name; + } else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = void 0; + } + map[currentSection] = map[currentSection] || {}; + const key = currentSubSection ? 
[currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}, "parseIni"); + +// src/loadSharedConfigFiles.ts +var import_slurpFile = __nccwpck_require__(19155); +var swallowError = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var CONFIG_PREFIX_SEPARATOR = "."; +var loadSharedConfigFiles = /* @__PURE__ */ __name(async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const parsedFiles = await Promise.all([ + (0, import_slurpFile.slurpFile)(configFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).then(getConfigData).catch(swallowError), + (0, import_slurpFile.slurpFile)(filepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).catch(swallowError) + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1] + }; +}, "loadSharedConfigFiles"); + +// src/getSsoSessionData.ts + +var getSsoSessionData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => key.startsWith(import_types.IniSectionType.SSO_SESSION + CONFIG_PREFIX_SEPARATOR)).reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}), "getSsoSessionData"); + +// src/loadSsoSessionData.ts +var import_slurpFile2 = __nccwpck_require__(19155); +var swallowError2 = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var loadSsoSessionData = /* @__PURE__ */ __name(async (init = {}) => (0, import_slurpFile2.slurpFile)(init.configFilepath ?? getConfigFilepath()).then(parseIni).then(getSsoSessionData).catch(swallowError2), "loadSsoSessionData"); + +// src/mergeConfigFiles.ts +var mergeConfigFiles = /* @__PURE__ */ __name((...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== void 0) { + Object.assign(merged[key], values); + } else { + merged[key] = values; + } + } + } + return merged; +}, "mergeConfigFiles"); + +// src/parseKnownFiles.ts +var parseKnownFiles = /* @__PURE__ */ __name(async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}, "parseKnownFiles"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 19155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.slurpFile = void 0; +const fs_1 = __nccwpck_require__(57147); +const { readFile } = fs_1.promises; +const filePromisesHash = {}; +const slurpFile = (path, options) => { + if (!filePromisesHash[path] || (options === null || options === void 0 ? 
void 0 : options.ignoreCache)) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; +exports.slurpFile = slurpFile; + + +/***/ }), + +/***/ 82370: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AlgorithmId: () => AlgorithmId, + EndpointURLScheme: () => EndpointURLScheme, + FieldPosition: () => FieldPosition, + HttpApiKeyAuthLocation: () => HttpApiKeyAuthLocation, + HttpAuthLocation: () => HttpAuthLocation, + IniSectionType: () => IniSectionType, + RequestHandlerProtocol: () => RequestHandlerProtocol, + SMITHY_CONTEXT_KEY: () => SMITHY_CONTEXT_KEY, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/auth/auth.ts +var HttpAuthLocation = /* @__PURE__ */ ((HttpAuthLocation2) => { + HttpAuthLocation2["HEADER"] = "header"; + HttpAuthLocation2["QUERY"] = "query"; + return HttpAuthLocation2; +})(HttpAuthLocation || {}); + +// src/auth/HttpApiKeyAuth.ts +var HttpApiKeyAuthLocation = /* @__PURE__ */ ((HttpApiKeyAuthLocation2) => { + HttpApiKeyAuthLocation2["HEADER"] = "header"; + HttpApiKeyAuthLocation2["QUERY"] = "query"; + return HttpApiKeyAuthLocation2; +})(HttpApiKeyAuthLocation || {}); + +// src/endpoint.ts +var EndpointURLScheme = /* @__PURE__ */ ((EndpointURLScheme2) => { + EndpointURLScheme2["HTTP"] = "http"; + EndpointURLScheme2["HTTPS"] = "https"; + return EndpointURLScheme2; +})(EndpointURLScheme || {}); + +// src/extensions/checksum.ts +var AlgorithmId = /* @__PURE__ */ ((AlgorithmId2) => { + AlgorithmId2["MD5"] = "md5"; + AlgorithmId2["CRC32"] = "crc32"; + AlgorithmId2["CRC32C"] = "crc32c"; + AlgorithmId2["SHA1"] = "sha1"; + AlgorithmId2["SHA256"] = "sha256"; + return AlgorithmId2; +})(AlgorithmId || {}); +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== void 0) { + checksumAlgorithms.push({ + algorithmId: () => "sha256" /* SHA256 */, + checksumConstructor: () => runtimeConfig.sha256 + }); + } + if (runtimeConfig.md5 != void 0) { + checksumAlgorithms.push({ + algorithmId: () => "md5" /* MD5 */, + checksumConstructor: () => runtimeConfig.md5 + }); } + return { + _checksumAlgorithms: checksumAlgorithms, + addChecksumAlgorithm(algo) { + this._checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return this._checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + 
clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/defaultClientConfiguration.ts +var getDefaultClientConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + ...getChecksumConfiguration(runtimeConfig) + }; +}, "getDefaultClientConfiguration"); +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + ...resolveChecksumRuntimeConfig(config) + }; +}, "resolveDefaultRuntimeConfig"); + +// src/http.ts +var FieldPosition = /* @__PURE__ */ ((FieldPosition2) => { + FieldPosition2[FieldPosition2["HEADER"] = 0] = "HEADER"; + FieldPosition2[FieldPosition2["TRAILER"] = 1] = "TRAILER"; + return FieldPosition2; +})(FieldPosition || {}); + +// src/middleware.ts +var SMITHY_CONTEXT_KEY = "__smithy_context"; + +// src/profile.ts +var IniSectionType = /* @__PURE__ */ ((IniSectionType2) => { + IniSectionType2["PROFILE"] = "profile"; + IniSectionType2["SSO_SESSION"] = "sso-session"; + IniSectionType2["SERVICES"] = "services"; + return IniSectionType2; +})(IniSectionType || {}); + +// src/transfer.ts +var RequestHandlerProtocol = /* @__PURE__ */ ((RequestHandlerProtocol2) => { + RequestHandlerProtocol2["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol2["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol2["TDS_8_0"] = "tds/8.0"; + return RequestHandlerProtocol2; +})(RequestHandlerProtocol || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 39733: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SignatureV4 = void 0; +const eventstream_codec_1 = __nccwpck_require__(56459); +const util_hex_encoding_1 = __nccwpck_require__(45364); +const util_middleware_1 = __nccwpck_require__(2390); +const util_utf8_1 = __nccwpck_require__(41895); +const constants_1 = __nccwpck_require__(48644); +const credentialDerivation_1 = __nccwpck_require__(19623); +const getCanonicalHeaders_1 = __nccwpck_require__(51393); +const getCanonicalQuery_1 = __nccwpck_require__(33243); +const getPayloadHash_1 = __nccwpck_require__(48545); +const headerUtil_1 = __nccwpck_require__(62179); +const moveHeadersToQuery_1 = __nccwpck_require__(49828); +const prepareRequest_1 = __nccwpck_require__(60075); +const utilDate_1 = __nccwpck_require__(39299); +class SignatureV4 { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + this.headerMarshaller = new eventstream_codec_1.HeaderMarshaller(util_utf8_1.toUtf8, util_utf8_1.fromUtf8); + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? applyChecksum : true; + this.regionProvider = (0, util_middleware_1.normalizeProvider)(region); + this.credentialProvider = (0, util_middleware_1.normalizeProvider)(credentials); + } + async presign(originalRequest, options = {}) { + const { signingDate = new Date(), expiresIn = 3600, unsignableHeaders, unhoistableHeaders, signableHeaders, signingRegion, signingService, } = options; + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion !== null && signingRegion !== void 0 ? 
signingRegion : (await this.regionProvider()); + const { longDate, shortDate } = formatDate(signingDate); + if (expiresIn > constants_1.MAX_PRESIGNED_TTL) { + return Promise.reject("Signature version 4 presigned URLs" + " must have an expiration date less than one week in" + " the future"); + } + const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? signingService : this.service); + const request = (0, moveHeadersToQuery_1.moveHeadersToQuery)((0, prepareRequest_1.prepareRequest)(originalRequest), { unhoistableHeaders }); + if (credentials.sessionToken) { + request.query[constants_1.TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[constants_1.ALGORITHM_QUERY_PARAM] = constants_1.ALGORITHM_IDENTIFIER; + request.query[constants_1.CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[constants_1.AMZ_DATE_QUERY_PARAM] = longDate; + request.query[constants_1.EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = (0, getCanonicalHeaders_1.getCanonicalHeaders)(request, unsignableHeaders, signableHeaders); + request.query[constants_1.SIGNED_HEADERS_QUERY_PARAM] = getCanonicalHeaderList(canonicalHeaders); + request.query[constants_1.SIGNATURE_QUERY_PARAM] = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, await (0, getPayloadHash_1.getPayloadHash)(originalRequest, this.sha256))); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } + else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } + else if (toSign.message) { + return this.signMessage(toSign, options); + } + else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const { shortDate, longDate } = formatDate(signingDate); + const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? 
signingService : this.service); + const hashedPayload = await (0, getPayloadHash_1.getPayloadHash)({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = (0, util_hex_encoding_1.toHex)(await hash.digest()); + const stringToSign = [ + constants_1.EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload, + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = new Date(), signingRegion, signingService }) { + const promise = this.signEvent({ + headers: this.headerMarshaller.format(signableMessage.message.headers), + payload: signableMessage.message.body, + }, { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature, + }); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const { shortDate } = formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update((0, util_utf8_1.toUint8Array)(stringToSign)); + return (0, util_hex_encoding_1.toHex)(await hash.digest()); + } + async signRequest(requestToSign, { signingDate = new Date(), signableHeaders, unsignableHeaders, signingRegion, signingService, } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const request = (0, prepareRequest_1.prepareRequest)(requestToSign); + const { longDate, shortDate } = formatDate(signingDate); + const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? 
signingService : this.service); + request.headers[constants_1.AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[constants_1.TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await (0, getPayloadHash_1.getPayloadHash)(request, this.sha256); + if (!(0, headerUtil_1.hasHeader)(constants_1.SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[constants_1.SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = (0, getCanonicalHeaders_1.getCanonicalHeaders)(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, payloadHash)); + request.headers[constants_1.AUTH_HEADER] = + `${constants_1.ALGORITHM_IDENTIFIER} ` + + `Credential=${credentials.accessKeyId}/${scope}, ` + + `SignedHeaders=${getCanonicalHeaderList(canonicalHeaders)}, ` + + `Signature=${signature}`; + return request; + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${(0, getCanonicalQuery_1.getCanonicalQuery)(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest) { + const hash = new this.sha256(); + hash.update((0, util_utf8_1.toUint8Array)(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${constants_1.ALGORITHM_IDENTIFIER} +${longDate} +${credentialScope} +${(0, util_hex_encoding_1.toHex)(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if ((pathSegment === null || pathSegment === void 0 ? void 0 : pathSegment.length) === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } + else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${(path === null || path === void 0 ? void 0 : path.startsWith("/")) ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && (path === null || path === void 0 ? void 0 : path.endsWith("/")) ? 
"/" : ""}`; + const doubleEncoded = encodeURIComponent(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign(longDate, credentialScope, canonicalRequest); + const hash = new this.sha256(await keyPromise); + hash.update((0, util_utf8_1.toUint8Array)(stringToSign)); + return (0, util_hex_encoding_1.toHex)(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return (0, credentialDerivation_1.getSigningKey)(this.sha256, credentials, shortDate, region, service || this.service); + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || + typeof credentials.accessKeyId !== "string" || + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } +} +exports.SignatureV4 = SignatureV4; +const formatDate = (now) => { + const longDate = (0, utilDate_1.iso8601)(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8), + }; +}; +const getCanonicalHeaderList = (headers) => Object.keys(headers).sort().join(";"); + + +/***/ }), + +/***/ 69098: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.cloneQuery = exports.cloneRequest = void 0; +const cloneRequest = ({ headers, query, ...rest }) => ({ + ...rest, + headers: { ...headers }, + query: query ? (0, exports.cloneQuery)(query) : undefined, +}); +exports.cloneRequest = cloneRequest; +const cloneQuery = (query) => Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param, + }; +}, {}); +exports.cloneQuery = cloneQuery; + + +/***/ }), + +/***/ 48644: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MAX_PRESIGNED_TTL = exports.KEY_TYPE_IDENTIFIER = exports.MAX_CACHE_SIZE = exports.UNSIGNED_PAYLOAD = exports.EVENT_ALGORITHM_IDENTIFIER = exports.ALGORITHM_IDENTIFIER_V4A = exports.ALGORITHM_IDENTIFIER = exports.UNSIGNABLE_PATTERNS = exports.SEC_HEADER_PATTERN = exports.PROXY_HEADER_PATTERN = exports.ALWAYS_UNSIGNABLE_HEADERS = exports.HOST_HEADER = exports.TOKEN_HEADER = exports.SHA256_HEADER = exports.SIGNATURE_HEADER = exports.GENERATED_HEADERS = exports.DATE_HEADER = exports.AMZ_DATE_HEADER = exports.AUTH_HEADER = exports.REGION_SET_PARAM = exports.TOKEN_QUERY_PARAM = exports.SIGNATURE_QUERY_PARAM = exports.EXPIRES_QUERY_PARAM = exports.SIGNED_HEADERS_QUERY_PARAM = exports.AMZ_DATE_QUERY_PARAM = exports.CREDENTIAL_QUERY_PARAM = exports.ALGORITHM_QUERY_PARAM = void 0; +exports.ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +exports.CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +exports.AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +exports.SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +exports.EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +exports.SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +exports.TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +exports.REGION_SET_PARAM = "X-Amz-Region-Set"; +exports.AUTH_HEADER = "authorization"; +exports.AMZ_DATE_HEADER = exports.AMZ_DATE_QUERY_PARAM.toLowerCase(); +exports.DATE_HEADER = "date"; +exports.GENERATED_HEADERS = [exports.AUTH_HEADER, exports.AMZ_DATE_HEADER, exports.DATE_HEADER]; +exports.SIGNATURE_HEADER = exports.SIGNATURE_QUERY_PARAM.toLowerCase(); +exports.SHA256_HEADER = "x-amz-content-sha256"; +exports.TOKEN_HEADER = exports.TOKEN_QUERY_PARAM.toLowerCase(); +exports.HOST_HEADER = "host"; +exports.ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true, +}; +exports.PROXY_HEADER_PATTERN = /^proxy-/; +exports.SEC_HEADER_PATTERN = /^sec-/; +exports.UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +exports.ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +exports.ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +exports.EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +exports.UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +exports.MAX_CACHE_SIZE = 50; +exports.KEY_TYPE_IDENTIFIER = "aws4_request"; +exports.MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; + + +/***/ }), + +/***/ 19623: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.clearCredentialCache = exports.getSigningKey = exports.createScope = void 0; +const util_hex_encoding_1 = __nccwpck_require__(45364); +const util_utf8_1 = __nccwpck_require__(41895); +const constants_1 = __nccwpck_require__(48644); +const signingKeyCache = {}; +const cacheQueue = []; +const createScope = (shortDate, region, service) => `${shortDate}/${region}/${service}/${constants_1.KEY_TYPE_IDENTIFIER}`; +exports.createScope = createScope; +const getSigningKey = async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, 
credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${(0, util_hex_encoding_1.toHex)(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > constants_1.MAX_CACHE_SIZE) { + delete signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, constants_1.KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return (signingKeyCache[cacheKey] = key); +}; +exports.getSigningKey = getSigningKey; +const clearCredentialCache = () => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}; +exports.clearCredentialCache = clearCredentialCache; +const hmac = (ctor, secret, data) => { + const hash = new ctor(secret); + hash.update((0, util_utf8_1.toUint8Array)(data)); + return hash.digest(); +}; + + +/***/ }), + +/***/ 51393: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCanonicalHeaders = void 0; +const constants_1 = __nccwpck_require__(48644); +const getCanonicalHeaders = ({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == undefined) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in constants_1.ALWAYS_UNSIGNABLE_HEADERS || + (unsignableHeaders === null || unsignableHeaders === void 0 ? void 0 : unsignableHeaders.has(canonicalHeaderName)) || + constants_1.PROXY_HEADER_PATTERN.test(canonicalHeaderName) || + constants_1.SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || (signableHeaders && !signableHeaders.has(canonicalHeaderName))) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}; +exports.getCanonicalHeaders = getCanonicalHeaders; + + +/***/ }), + +/***/ 33243: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCanonicalQuery = void 0; +const util_uri_escape_1 = __nccwpck_require__(54197); +const constants_1 = __nccwpck_require__(48644); +const getCanonicalQuery = ({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query).sort()) { + if (key.toLowerCase() === constants_1.SIGNATURE_HEADER) { + continue; + } + keys.push(key); + const value = query[key]; + if (typeof value === "string") { + serialized[key] = `${(0, util_uri_escape_1.escapeUri)(key)}=${(0, util_uri_escape_1.escapeUri)(value)}`; + } + else if (Array.isArray(value)) { + serialized[key] = value + .slice(0) + .reduce((encoded, value) => encoded.concat([`${(0, util_uri_escape_1.escapeUri)(key)}=${(0, util_uri_escape_1.escapeUri)(value)}`]), []) + .sort() + .join("&"); + } + } + return keys + .map((key) => serialized[key]) + .filter((serialized) => serialized) + .join("&"); +}; +exports.getCanonicalQuery = getCanonicalQuery; + + +/***/ }), + +/***/ 48545: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getPayloadHash = void 0; +const is_array_buffer_1 = 
__nccwpck_require__(10780); +const util_hex_encoding_1 = __nccwpck_require__(45364); +const util_utf8_1 = __nccwpck_require__(41895); +const constants_1 = __nccwpck_require__(48644); +const getPayloadHash = async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === constants_1.SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == undefined) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } + else if (typeof body === "string" || ArrayBuffer.isView(body) || (0, is_array_buffer_1.isArrayBuffer)(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update((0, util_utf8_1.toUint8Array)(body)); + return (0, util_hex_encoding_1.toHex)(await hashCtor.digest()); + } + return constants_1.UNSIGNED_PAYLOAD; +}; +exports.getPayloadHash = getPayloadHash; + + +/***/ }), + +/***/ 62179: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deleteHeader = exports.getHeaderValue = exports.hasHeader = void 0; +const hasHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}; +exports.hasHeader = hasHeader; +const getHeaderValue = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return headers[headerName]; + } + } + return undefined; +}; +exports.getHeaderValue = getHeaderValue; +const deleteHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + delete headers[headerName]; + } + } +}; +exports.deleteHeader = deleteHeader; + + +/***/ }), + +/***/ 11528: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareRequest = exports.moveHeadersToQuery = exports.getPayloadHash = exports.getCanonicalQuery = exports.getCanonicalHeaders = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(39733), exports); +var getCanonicalHeaders_1 = __nccwpck_require__(51393); +Object.defineProperty(exports, "getCanonicalHeaders", ({ enumerable: true, get: function () { return getCanonicalHeaders_1.getCanonicalHeaders; } })); +var getCanonicalQuery_1 = __nccwpck_require__(33243); +Object.defineProperty(exports, "getCanonicalQuery", ({ enumerable: true, get: function () { return getCanonicalQuery_1.getCanonicalQuery; } })); +var getPayloadHash_1 = __nccwpck_require__(48545); +Object.defineProperty(exports, "getPayloadHash", ({ enumerable: true, get: function () { return getPayloadHash_1.getPayloadHash; } })); +var moveHeadersToQuery_1 = __nccwpck_require__(49828); +Object.defineProperty(exports, "moveHeadersToQuery", ({ enumerable: true, get: function () { return moveHeadersToQuery_1.moveHeadersToQuery; } })); +var prepareRequest_1 = __nccwpck_require__(60075); +Object.defineProperty(exports, "prepareRequest", ({ enumerable: true, get: function () { return prepareRequest_1.prepareRequest; } })); +tslib_1.__exportStar(__nccwpck_require__(19623), exports); + + +/***/ }), + +/***/ 49828: +/***/ ((__unused_webpack_module, exports, 
__nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.moveHeadersToQuery = void 0; +const cloneRequest_1 = __nccwpck_require__(69098); +const moveHeadersToQuery = (request, options = {}) => { + var _a; + const { headers, query = {} } = typeof request.clone === "function" ? request.clone() : (0, cloneRequest_1.cloneRequest)(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if (lname.slice(0, 6) === "x-amz-" && !((_a = options.unhoistableHeaders) === null || _a === void 0 ? void 0 : _a.has(lname))) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query, + }; +}; +exports.moveHeadersToQuery = moveHeadersToQuery; + + +/***/ }), + +/***/ 60075: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareRequest = void 0; +const cloneRequest_1 = __nccwpck_require__(69098); +const constants_1 = __nccwpck_require__(48644); +const prepareRequest = (request) => { + request = typeof request.clone === "function" ? request.clone() : (0, cloneRequest_1.cloneRequest)(request); + for (const headerName of Object.keys(request.headers)) { + if (constants_1.GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}; +exports.prepareRequest = prepareRequest; + + +/***/ }), + +/***/ 39299: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toDate = exports.iso8601 = void 0; +const iso8601 = (time) => (0, exports.toDate)(time) + .toISOString() + .replace(/\.\d{3}Z$/, "Z"); +exports.iso8601 = iso8601; +const toDate = (time) => { + if (typeof time === "number") { + return new Date(time * 1000); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1000); + } + return new Date(time); + } + return time; +}; +exports.toDate = toDate; + + +/***/ }), + +/***/ 70438: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoOpLogger = void 0; +class NoOpLogger { + trace() { } + debug() { } + info() { } + warn() { } + error() { } +} +exports.NoOpLogger = NoOpLogger; + + +/***/ }), + +/***/ 61600: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Client = void 0; +const middleware_stack_1 = __nccwpck_require__(97911); +class Client { + constructor(config) { + this.middlewareStack = (0, middleware_stack_1.constructStack)(); + this.config = config; + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : undefined; + const callback = typeof optionsOrCb === "function" ? 
optionsOrCb : cb; + const handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + if (callback) { + handler(command) + .then((result) => callback(null, result.output), (err) => callback(err)) + .catch(() => { }); + } + else { + return handler(command).then((result) => result.output); + } + } + destroy() { + if (this.config.requestHandler.destroy) + this.config.requestHandler.destroy(); + } +} +exports.Client = Client; + + +/***/ }), + +/***/ 32813: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.collectBody = void 0; +const util_stream_1 = __nccwpck_require__(96607); +const collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return util_stream_1.Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return util_stream_1.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return util_stream_1.Uint8ArrayBlobAdapter.mutate(await fromContext); +}; +exports.collectBody = collectBody; + + +/***/ }), + +/***/ 75414: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Command = void 0; +const middleware_stack_1 = __nccwpck_require__(97911); +class Command { + constructor() { + this.middlewareStack = (0, middleware_stack_1.constructStack)(); + } +} +exports.Command = Command; + + +/***/ }), + +/***/ 92541: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SENSITIVE_STRING = void 0; +exports.SENSITIVE_STRING = "***SensitiveInformation***"; + + +/***/ }), + +/***/ 56929: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createAggregatedClient = void 0; +const createAggregatedClient = (commands, Client) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = async function (args, optionsOrCb, cb) { + const command = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + }; + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client.prototype[methodName] = methodImpl; + } +}; +exports.createAggregatedClient = createAggregatedClient; + + +/***/ }), + +/***/ 21737: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseEpochTimestamp = exports.parseRfc7231DateTime = exports.parseRfc3339DateTimeWithOffset = exports.parseRfc3339DateTime = exports.dateToUtcString = void 0; +const parse_utils_1 = __nccwpck_require__(74857); +const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +const MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + 
const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? `0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +exports.dateToUtcString = dateToUtcString; +const RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +const parseRfc3339DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}; +exports.parseRfc3339DateTime = parseRfc3339DateTime; +const RFC3339_WITH_OFFSET = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/); +const parseRfc3339DateTimeWithOffset = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}; +exports.parseRfc3339DateTimeWithOffset = parseRfc3339DateTimeWithOffset; +const IMF_FIXDATE = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const RFC_850_DATE = new RegExp(/^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const ASC_TIME = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? 
(\d{4})$/); +const parseRfc7231DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate((0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year(buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds, + })); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate((0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr.trimLeft(), "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}; +exports.parseRfc7231DateTime = parseRfc7231DateTime; +const parseEpochTimestamp = (value) => { + if (value === null || value === undefined) { + return undefined; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } + else if (typeof value === "string") { + valueAsDouble = (0, parse_utils_1.strictParseDouble)(value); + } + else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1000)); +}; +exports.parseEpochTimestamp = parseEpochTimestamp; +const buildDate = (year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date(Date.UTC(year, adjustedMonth, day, parseDateValue(time.hours, "hour", 0, 23), parseDateValue(time.minutes, "minute", 0, 59), parseDateValue(time.seconds, "seconds", 0, 60), parseMilliseconds(time.fractionalMilliseconds))); +}; +const parseTwoDigitYear = (value) => { + const thisYear = new Date().getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}; +const FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1000; +const adjustRfc850Year = (input) => { + if (input.getTime() - new Date().getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date(Date.UTC(input.getUTCFullYear() - 100, input.getUTCMonth(), input.getUTCDate(), input.getUTCHours(), input.getUTCMinutes(), input.getUTCSeconds(), input.getUTCMilliseconds())); + } + return input; +}; +const parseMonthByShortName = (value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}; +const DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +const 
validateDayOfMonth = (year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}; +const isLeapYear = (year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}; +const parseDateValue = (value, type, lower, upper) => { + const dateVal = (0, parse_utils_1.strictParseByte)(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}; +const parseMilliseconds = (value) => { + if (value === null || value === undefined) { + return 0; + } + return (0, parse_utils_1.strictParseFloat32)("0." + value) * 1000; +}; +const parseOffsetToMilliseconds = (value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } + else if (directionStr == "-") { + direction = -1; + } + else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1000; +}; +const stripLeadingZeroes = (value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}; + + +/***/ }), + +/***/ 9681: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.withBaseException = exports.throwDefaultError = void 0; +const exceptions_1 = __nccwpck_require__(88074); +const throwDefaultError = ({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : undefined; + const response = new exceptionCtor({ + name: (parsedBody === null || parsedBody === void 0 ? void 0 : parsedBody.code) || (parsedBody === null || parsedBody === void 0 ? void 0 : parsedBody.Code) || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata, + }); + throw (0, exceptions_1.decorateServiceException)(response, parsedBody); +}; +exports.throwDefaultError = throwDefaultError; +const withBaseException = (ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + (0, exports.throwDefaultError)({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}; +exports.withBaseException = withBaseException; +const deserializeMetadata = (output) => { + var _a, _b; + return ({ + httpStatusCode: output.statusCode, + requestId: (_b = (_a = output.headers["x-amzn-requestid"]) !== null && _a !== void 0 ? _a : output.headers["x-amzn-request-id"]) !== null && _b !== void 0 ? 
_b : output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], + }); +}; + + +/***/ }), + +/***/ 11163: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadConfigsForDefaultMode = void 0; +const loadConfigsForDefaultMode = (mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100, + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 30000, + }; + default: + return {}; + } +}; +exports.loadConfigsForDefaultMode = loadConfigsForDefaultMode; + + +/***/ }), + +/***/ 91809: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.emitWarningIfUnsupportedVersion = void 0; +let warningEmitted = false; +const emitWarningIfUnsupportedVersion = (version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 14) { + warningEmitted = true; + } +}; +exports.emitWarningIfUnsupportedVersion = emitWarningIfUnsupportedVersion; + + +/***/ }), + +/***/ 88074: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decorateServiceException = exports.ServiceException = void 0; +class ServiceException extends Error { + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, ServiceException.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } +} +exports.ServiceException = ServiceException; +const decorateServiceException = (exception, additions = {}) => { + Object.entries(additions) + .filter(([, v]) => v !== undefined) + .forEach(([k, v]) => { + if (exception[k] == undefined || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}; +exports.decorateServiceException = decorateServiceException; + + +/***/ }), + +/***/ 76016: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.extendedEncodeURIComponent = void 0; +function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +exports.extendedEncodeURIComponent = extendedEncodeURIComponent; + + +/***/ }), + +/***/ 30941: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveChecksumRuntimeConfig = exports.getChecksumConfiguration = exports.AlgorithmId = void 0; +const types_1 = __nccwpck_require__(55756); +Object.defineProperty(exports, "AlgorithmId", ({ enumerable: true, get: function () { return types_1.AlgorithmId; } })); +const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in types_1.AlgorithmId) { + const algorithmId = types_1.AlgorithmId[id]; + if (runtimeConfig[algorithmId] === undefined) { + continue; + } + 
checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId], + }); + } + return { + _checksumAlgorithms: checksumAlgorithms, + addChecksumAlgorithm(algo) { + this._checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return this._checksumAlgorithms; + }, + }; +}; +exports.getChecksumConfiguration = getChecksumConfiguration; +const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; +exports.resolveChecksumRuntimeConfig = resolveChecksumRuntimeConfig; + + +/***/ }), + +/***/ 78643: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveDefaultRuntimeConfig = exports.getDefaultClientConfiguration = exports.getDefaultExtensionConfiguration = void 0; +const checksum_1 = __nccwpck_require__(30941); +const retry_1 = __nccwpck_require__(67367); +const getDefaultExtensionConfiguration = (runtimeConfig) => { + return { + ...(0, checksum_1.getChecksumConfiguration)(runtimeConfig), + ...(0, retry_1.getRetryConfiguration)(runtimeConfig), + }; +}; +exports.getDefaultExtensionConfiguration = getDefaultExtensionConfiguration; +exports.getDefaultClientConfiguration = exports.getDefaultExtensionConfiguration; +const resolveDefaultRuntimeConfig = (config) => { + return { + ...(0, checksum_1.resolveChecksumRuntimeConfig)(config), + ...(0, retry_1.resolveRetryRuntimeConfig)(config), + }; +}; +exports.resolveDefaultRuntimeConfig = resolveDefaultRuntimeConfig; + + +/***/ }), + +/***/ 1822: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(78643), exports); + + +/***/ }), + +/***/ 67367: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveRetryRuntimeConfig = exports.getRetryConfiguration = void 0; +const getRetryConfiguration = (runtimeConfig) => { + let _retryStrategy = runtimeConfig.retryStrategy; + return { + setRetryStrategy(retryStrategy) { + _retryStrategy = retryStrategy; + }, + retryStrategy() { + return _retryStrategy; + }, + }; +}; +exports.getRetryConfiguration = getRetryConfiguration; +const resolveRetryRuntimeConfig = (retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}; +exports.resolveRetryRuntimeConfig = resolveRetryRuntimeConfig; + + +/***/ }), + +/***/ 42638: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getArrayIfSingleItem = void 0; +const getArrayIfSingleItem = (mayBeArray) => Array.isArray(mayBeArray) ? 
mayBeArray : [mayBeArray]; +exports.getArrayIfSingleItem = getArrayIfSingleItem; + + +/***/ }), + +/***/ 92188: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getValueFromTextNode = void 0; +const getValueFromTextNode = (obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== undefined) { + obj[key] = obj[key][textNodeName]; + } + else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = (0, exports.getValueFromTextNode)(obj[key]); + } + } + return obj; +}; +exports.getValueFromTextNode = getValueFromTextNode; + + +/***/ }), + +/***/ 63570: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(70438), exports); +tslib_1.__exportStar(__nccwpck_require__(61600), exports); +tslib_1.__exportStar(__nccwpck_require__(32813), exports); +tslib_1.__exportStar(__nccwpck_require__(75414), exports); +tslib_1.__exportStar(__nccwpck_require__(92541), exports); +tslib_1.__exportStar(__nccwpck_require__(56929), exports); +tslib_1.__exportStar(__nccwpck_require__(21737), exports); +tslib_1.__exportStar(__nccwpck_require__(9681), exports); +tslib_1.__exportStar(__nccwpck_require__(11163), exports); +tslib_1.__exportStar(__nccwpck_require__(91809), exports); +tslib_1.__exportStar(__nccwpck_require__(1822), exports); +tslib_1.__exportStar(__nccwpck_require__(88074), exports); +tslib_1.__exportStar(__nccwpck_require__(76016), exports); +tslib_1.__exportStar(__nccwpck_require__(42638), exports); +tslib_1.__exportStar(__nccwpck_require__(92188), exports); +tslib_1.__exportStar(__nccwpck_require__(32964), exports); +tslib_1.__exportStar(__nccwpck_require__(83495), exports); +tslib_1.__exportStar(__nccwpck_require__(74857), exports); +tslib_1.__exportStar(__nccwpck_require__(15342), exports); +tslib_1.__exportStar(__nccwpck_require__(53456), exports); +tslib_1.__exportStar(__nccwpck_require__(1752), exports); +tslib_1.__exportStar(__nccwpck_require__(92480), exports); + + +/***/ }), + +/***/ 32964: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LazyJsonString = exports.StringWrapper = void 0; +const StringWrapper = function () { + const Class = Object.getPrototypeOf(this).constructor; + const Constructor = Function.bind.apply(String, [null, ...arguments]); + const instance = new Constructor(); + Object.setPrototypeOf(instance, Class.prototype); + return instance; +}; +exports.StringWrapper = StringWrapper; +exports.StringWrapper.prototype = Object.create(String.prototype, { + constructor: { + value: exports.StringWrapper, + enumerable: false, + writable: true, + configurable: true, + }, +}); +Object.setPrototypeOf(exports.StringWrapper, String); +class LazyJsonString extends exports.StringWrapper { + deserializeJSON() { + return JSON.parse(super.toString()); + } + toJSON() { + return super.toString(); + } + static fromObject(object) { + if (object instanceof LazyJsonString) { + return object; + } + else if (object instanceof String || typeof object === "string") { + return new LazyJsonString(object); + } + return new LazyJsonString(JSON.stringify(object)); + } +} +exports.LazyJsonString = LazyJsonString; + + +/***/ }), + +/***/ 83495: +/***/ 
((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.take = exports.convertMap = exports.map = void 0; +function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } + else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } + else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +exports.map = map; +const convertMap = (target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}; +exports.convertMap = convertMap; +const take = (source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}; +exports.take = take; +const mapWithFilter = (target, filter, instructions) => { + return map(target, Object.entries(instructions).reduce((_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } + else { + if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } + else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, {})); +}; +const applyInstruction = (target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if ((typeof filter === "function" && filter(source[sourceKey])) || (typeof filter !== "function" && !!filter)) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === undefined && (_value = value()) != null; + const customFilterPassed = (typeof filter === "function" && !!filter(void 0)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed) { + target[targetKey] = _value; + } + else if (customFilterPassed) { + target[targetKey] = value(); + } + } + else { + const defaultFilterPassed = filter === undefined && value != null; + const customFilterPassed = (typeof filter === "function" && !!filter(value)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}; +const nonNullish = (_) => _ != null; +const pass = (_) => _; + + +/***/ }), + +/***/ 74857: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = exports.strictParseByte = exports.strictParseShort = exports.strictParseInt32 = exports.strictParseInt = exports.strictParseLong = exports.limitedParseFloat32 = exports.limitedParseFloat = exports.handleFloat = exports.limitedParseDouble = exports.strictParseFloat32 = exports.strictParseFloat = exports.strictParseDouble = exports.expectUnion = exports.expectString = exports.expectObject = exports.expectNonNull = exports.expectByte = exports.expectShort = exports.expectInt32 
= exports.expectInt = exports.expectLong = exports.expectFloat32 = exports.expectNumber = exports.expectBoolean = exports.parseBoolean = void 0; +const parseBoolean = (value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}; +exports.parseBoolean = parseBoolean; +const expectBoolean = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + exports.logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + exports.logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}; +exports.expectBoolean = expectBoolean; +const expectNumber = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + exports.logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}; +exports.expectNumber = expectNumber; +const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +const expectFloat32 = (value) => { + const expected = (0, exports.expectNumber)(value); + if (expected !== undefined && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}; +exports.expectFloat32 = expectFloat32; +const expectLong = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}; +exports.expectLong = expectLong; +exports.expectInt = exports.expectLong; +const expectInt32 = (value) => expectSizedInt(value, 32); +exports.expectInt32 = expectInt32; +const expectShort = (value) => expectSizedInt(value, 16); +exports.expectShort = expectShort; +const expectByte = (value) => expectSizedInt(value, 8); +exports.expectByte = expectByte; +const expectSizedInt = (value, size) => { + const expected = (0, exports.expectLong)(value); + if (expected !== undefined && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}; +const castInt = (value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}; +const expectNonNull = (value, location) => { + if (value === null || value === undefined) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}; +exports.expectNonNull = 
expectNonNull; +const expectObject = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? "array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}; +exports.expectObject = expectObject; +const expectString = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + exports.logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}; +exports.expectString = expectString; +const expectUnion = (value) => { + if (value === null || value === undefined) { + return undefined; + } + const asObject = (0, exports.expectObject)(value); + const setKeys = Object.entries(asObject) + .filter(([, v]) => v != null) + .map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); + } + return asObject; +}; +exports.expectUnion = expectUnion; +const strictParseDouble = (value) => { + if (typeof value == "string") { + return (0, exports.expectNumber)(parseNumber(value)); + } + return (0, exports.expectNumber)(value); +}; +exports.strictParseDouble = strictParseDouble; +exports.strictParseFloat = exports.strictParseDouble; +const strictParseFloat32 = (value) => { + if (typeof value == "string") { + return (0, exports.expectFloat32)(parseNumber(value)); + } + return (0, exports.expectFloat32)(value); +}; +exports.strictParseFloat32 = strictParseFloat32; +const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +const parseNumber = (value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}; +const limitedParseDouble = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return (0, exports.expectNumber)(value); +}; +exports.limitedParseDouble = limitedParseDouble; +exports.handleFloat = exports.limitedParseDouble; +exports.limitedParseFloat = exports.limitedParseDouble; +const limitedParseFloat32 = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return (0, exports.expectFloat32)(value); +}; +exports.limitedParseFloat32 = limitedParseFloat32; +const parseFloatString = (value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}; +const strictParseLong = (value) => { + if (typeof value === "string") { + return (0, exports.expectLong)(parseNumber(value)); + } + return (0, exports.expectLong)(value); +}; +exports.strictParseLong = strictParseLong; +exports.strictParseInt = exports.strictParseLong; +const strictParseInt32 = (value) => { + if (typeof value === "string") { + return (0, exports.expectInt32)(parseNumber(value)); + } + return (0, exports.expectInt32)(value); +}; +exports.strictParseInt32 = strictParseInt32; +const 
strictParseShort = (value) => { + if (typeof value === "string") { + return (0, exports.expectShort)(parseNumber(value)); + } + return (0, exports.expectShort)(value); +}; +exports.strictParseShort = strictParseShort; +const strictParseByte = (value) => { + if (typeof value === "string") { + return (0, exports.expectByte)(parseNumber(value)); + } + return (0, exports.expectByte)(value); +}; +exports.strictParseByte = strictParseByte; +const stackTraceWarning = (message) => { + return String(new TypeError(message).stack || message) + .split("\n") + .slice(0, 5) + .filter((s) => !s.includes("stackTraceWarning")) + .join("\n"); +}; +exports.logger = { + warn: console.warn, +}; + + +/***/ }), + +/***/ 15342: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolvedPath = void 0; +const extended_encode_uri_component_1 = __nccwpck_require__(76016); +const resolvedPath = (resolvedPath, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== undefined) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath = resolvedPath.replace(uriLabel, isGreedyLabel + ? labelValue + .split("/") + .map((segment) => (0, extended_encode_uri_component_1.extendedEncodeURIComponent)(segment)) + .join("/") + : (0, extended_encode_uri_component_1.extendedEncodeURIComponent)(labelValue)); + } + else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath; }; -var endpoints_default = Endpoints; +exports.resolvedPath = resolvedPath; -// pkg/dist-src/endpoints-to-methods.js -var endpointMethodsMap = /* @__PURE__ */ new Map(); -for (const [scope, endpoints] of Object.entries(endpoints_default)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign( - { - method, - url - }, - defaults - ); - if (!endpointMethodsMap.has(scope)) { - endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + +/***/ }), + +/***/ 53456: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.serializeFloat = void 0; +const serializeFloat = (value) => { + if (value !== value) { + return "NaN"; } - endpointMethodsMap.get(scope).set(methodName, { - scope, - methodName, - endpointDefaults, - decorations - }); - } -} -var handler = { - has({ scope }, methodName) { - return endpointMethodsMap.get(scope).has(methodName); - }, - getOwnPropertyDescriptor(target, methodName) { - return { - value: this.get(target, methodName), - // ensures method is in the cache - configurable: true, - writable: true, - enumerable: true - }; - }, - defineProperty(target, methodName, descriptor) { - Object.defineProperty(target.cache, methodName, descriptor); - return true; - }, - deleteProperty(target, methodName) { - delete target.cache[methodName]; - return true; - }, - ownKeys({ scope }) { - return [...endpointMethodsMap.get(scope).keys()]; - }, - set(target, methodName, value) { - return target.cache[methodName] = value; - }, - get({ octokit, scope, cache }, methodName) { - if (cache[methodName]) { - return cache[methodName]; + switch (value) { + case Infinity: + return "Infinity"; + case 
-Infinity: + return "-Infinity"; + default: + return value; } - const method = endpointMethodsMap.get(scope).get(methodName); - if (!method) { - return void 0; +}; +exports.serializeFloat = serializeFloat; + + +/***/ }), + +/***/ 1752: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports._json = void 0; +const _json = (obj) => { + if (obj == null) { + return {}; } - const { endpointDefaults, decorations } = method; - if (decorations) { - cache[methodName] = decorate( - octokit, - scope, - methodName, - endpointDefaults, - decorations - ); - } else { - cache[methodName] = octokit.request.defaults(endpointDefaults); + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(exports._json); } - return cache[methodName]; - } -}; -function endpointsToMethods(octokit) { - const newMethods = {}; - for (const scope of endpointMethodsMap.keys()) { - newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); - } - return newMethods; -} -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - function withDecorations(...args) { - let options = requestWithDefaults.endpoint.merge(...args); - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: void 0 - }); - return requestWithDefaults(options); + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = (0, exports._json)(obj[key]); + } + return target; } - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn( - `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` - ); + return obj; +}; +exports._json = _json; + + +/***/ }), + +/***/ 92480: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitEvery = void 0; +function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); } - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; } - if (decorations.renamedParameters) { - const options2 = requestWithDefaults.endpoint.merge(...args); - for (const [name, alias] of Object.entries( - decorations.renamedParameters - )) { - if (name in options2) { - octokit.log.warn( - `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead` - ); - if (!(alias in options2)) { - options2[alias] = options2[name]; - } - delete options2[name]; + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } + else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; } - } - return requestWithDefaults(options2); } - return requestWithDefaults(...args); - } - return Object.assign(withDecorations, requestWithDefaults); + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; } +exports.splitEvery = splitEvery; -// pkg/dist-src/index.js -function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit); - return { - rest: api - }; -} -restEndpointMethods.VERSION = VERSION; -function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit); - return { - ...api, - rest: api - }; -} -legacyRestEndpointMethods.VERSION = VERSION; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); + +/***/ }), + +/***/ 74075: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 10537: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 93242: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -var __create = Object.create; -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __getProtoOf = Object.getPrototypeOf; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( - // If the importer is in node compatibility mode or this is not an ESM - // file that has been converted to a CommonJS file using a Babel- - // compatible transform (i.e. "__esModule" has not been set), then set - // "default" to the CommonJS "module.exports" for node compatibility. - isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, - mod -)); -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpApiKeyAuthLocation = void 0; +var HttpApiKeyAuthLocation; +(function (HttpApiKeyAuthLocation) { + HttpApiKeyAuthLocation["HEADER"] = "header"; + HttpApiKeyAuthLocation["QUERY"] = "query"; +})(HttpApiKeyAuthLocation = exports.HttpApiKeyAuthLocation || (exports.HttpApiKeyAuthLocation = {})); -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - RequestError: () => RequestError -}); -module.exports = __toCommonJS(dist_src_exports); -var import_deprecation = __nccwpck_require__(58932); -var import_once = __toESM(__nccwpck_require__(1223)); -var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); -var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); -var RequestError = class extends Error { - constructor(message, statusCode, options) { - super(message); - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - this.name = "HttpError"; - this.status = statusCode; - let headers; - if ("headers" in options && typeof options.headers !== "undefined") { - headers = options.headers; - } - if ("response" in options) { - this.response = options.response; - headers = options.response.headers; + +/***/ }), + +/***/ 81851: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 91530: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 74020: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 52263: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 79467: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpAuthLocation = void 0; +var HttpAuthLocation; +(function (HttpAuthLocation) { + HttpAuthLocation["HEADER"] = "header"; + HttpAuthLocation["QUERY"] = "query"; +})(HttpAuthLocation = exports.HttpAuthLocation || (exports.HttpAuthLocation = {})); + + +/***/ }), + +/***/ 11239: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(79467), exports); +tslib_1.__exportStar(__nccwpck_require__(93242), exports); +tslib_1.__exportStar(__nccwpck_require__(81851), exports); +tslib_1.__exportStar(__nccwpck_require__(91530), exports); +tslib_1.__exportStar(__nccwpck_require__(74020), exports); +tslib_1.__exportStar(__nccwpck_require__(52263), exports); + + +/***/ }), + +/***/ 63274: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 78340: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 4744: +/***/ ((__unused_webpack_module, exports) => { 
+ +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 68270: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 39580: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 57628: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(39580), exports); +tslib_1.__exportStar(__nccwpck_require__(98398), exports); +tslib_1.__exportStar(__nccwpck_require__(76522), exports); + + +/***/ }), + +/***/ 98398: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 76522: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 89035: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7225: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 54126: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointURLScheme = void 0; +var EndpointURLScheme; +(function (EndpointURLScheme) { + EndpointURLScheme["HTTP"] = "http"; + EndpointURLScheme["HTTPS"] = "https"; +})(EndpointURLScheme = exports.EndpointURLScheme || (exports.EndpointURLScheme = {})); + + +/***/ }), + +/***/ 55612: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 43084: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 89843: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 63799: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 21550: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(55612), exports); +tslib_1.__exportStar(__nccwpck_require__(43084), exports); +tslib_1.__exportStar(__nccwpck_require__(89843), exports); +tslib_1.__exportStar(__nccwpck_require__(57658), exports); +tslib_1.__exportStar(__nccwpck_require__(63799), exports); + + +/***/ }), + +/***/ 57658: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 88508: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8947: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, 
"__esModule", ({ value: true })); +exports.resolveChecksumRuntimeConfig = exports.getChecksumConfiguration = exports.AlgorithmId = void 0; +var AlgorithmId; +(function (AlgorithmId) { + AlgorithmId["MD5"] = "md5"; + AlgorithmId["CRC32"] = "crc32"; + AlgorithmId["CRC32C"] = "crc32c"; + AlgorithmId["SHA1"] = "sha1"; + AlgorithmId["SHA256"] = "sha256"; +})(AlgorithmId = exports.AlgorithmId || (exports.AlgorithmId = {})); +const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.SHA256, + checksumConstructor: () => runtimeConfig.sha256, + }); } - const requestCopy = Object.assign({}, options.request); - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace( - / .*$/, - " [REDACTED]" - ) - }); + if (runtimeConfig.md5 != undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.MD5, + checksumConstructor: () => runtimeConfig.md5, + }); } - requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - Object.defineProperty(this, "code", { - get() { - logOnceCode( - new import_deprecation.Deprecation( - "[@octokit/request-error] `error.code` is deprecated, use `error.status`." - ) - ); - return statusCode; - } - }); - Object.defineProperty(this, "headers", { - get() { - logOnceHeaders( - new import_deprecation.Deprecation( - "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." - ) - ); - return headers || {}; - } + return { + _checksumAlgorithms: checksumAlgorithms, + addChecksumAlgorithm(algo) { + this._checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return this._checksumAlgorithms; + }, + }; +}; +exports.getChecksumConfiguration = getChecksumConfiguration; +const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); }); - } + return runtimeConfig; }; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); +exports.resolveChecksumRuntimeConfig = resolveChecksumRuntimeConfig; /***/ }), -/***/ 36234: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 89169: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveDefaultRuntimeConfig = exports.getDefaultClientConfiguration = void 0; +const checksum_1 = __nccwpck_require__(8947); +const getDefaultClientConfiguration = (runtimeConfig) => { + return { + ...(0, checksum_1.getChecksumConfiguration)(runtimeConfig), + }; }; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: 
() => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; +exports.getDefaultClientConfiguration = getDefaultClientConfiguration; +const resolveDefaultRuntimeConfig = (config) => { + return { + ...(0, checksum_1.resolveChecksumRuntimeConfig)(config), + }; }; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); +exports.resolveDefaultRuntimeConfig = resolveDefaultRuntimeConfig; + + +/***/ }), + +/***/ 32245: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 47447: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AlgorithmId = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(89169), exports); +tslib_1.__exportStar(__nccwpck_require__(32245), exports); +var checksum_1 = __nccwpck_require__(8947); +Object.defineProperty(exports, "AlgorithmId", ({ enumerable: true, get: function () { return checksum_1.AlgorithmId; } })); + + +/***/ }), + +/***/ 18883: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FieldPosition = void 0; +var FieldPosition; +(function (FieldPosition) { + FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER"; + FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER"; +})(FieldPosition = exports.FieldPosition || (exports.FieldPosition = {})); + + +/***/ }), + +/***/ 12842: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 197: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7545: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 49123: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 28006: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(197), exports); +tslib_1.__exportStar(__nccwpck_require__(7545), exports); +tslib_1.__exportStar(__nccwpck_require__(49123), exports); +tslib_1.__exportStar(__nccwpck_require__(84476), exports); + + +/***/ }), + +/***/ 84476: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 55756: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(74075), exports); +tslib_1.__exportStar(__nccwpck_require__(11239), exports); +tslib_1.__exportStar(__nccwpck_require__(63274), exports); +tslib_1.__exportStar(__nccwpck_require__(78340), exports); +tslib_1.__exportStar(__nccwpck_require__(4744), exports); +tslib_1.__exportStar(__nccwpck_require__(68270), exports); +tslib_1.__exportStar(__nccwpck_require__(57628), exports); 
+tslib_1.__exportStar(__nccwpck_require__(89035), exports); +tslib_1.__exportStar(__nccwpck_require__(7225), exports); +tslib_1.__exportStar(__nccwpck_require__(54126), exports); +tslib_1.__exportStar(__nccwpck_require__(21550), exports); +tslib_1.__exportStar(__nccwpck_require__(88508), exports); +tslib_1.__exportStar(__nccwpck_require__(47447), exports); +tslib_1.__exportStar(__nccwpck_require__(18883), exports); +tslib_1.__exportStar(__nccwpck_require__(12842), exports); +tslib_1.__exportStar(__nccwpck_require__(28006), exports); +tslib_1.__exportStar(__nccwpck_require__(52866), exports); +tslib_1.__exportStar(__nccwpck_require__(17756), exports); +tslib_1.__exportStar(__nccwpck_require__(45489), exports); +tslib_1.__exportStar(__nccwpck_require__(26524), exports); +tslib_1.__exportStar(__nccwpck_require__(14603), exports); +tslib_1.__exportStar(__nccwpck_require__(83752), exports); +tslib_1.__exportStar(__nccwpck_require__(30774), exports); +tslib_1.__exportStar(__nccwpck_require__(14089), exports); +tslib_1.__exportStar(__nccwpck_require__(45678), exports); +tslib_1.__exportStar(__nccwpck_require__(69926), exports); +tslib_1.__exportStar(__nccwpck_require__(9945), exports); +tslib_1.__exportStar(__nccwpck_require__(28564), exports); +tslib_1.__exportStar(__nccwpck_require__(61285), exports); +tslib_1.__exportStar(__nccwpck_require__(50364), exports); +tslib_1.__exportStar(__nccwpck_require__(69304), exports); +tslib_1.__exportStar(__nccwpck_require__(46098), exports); +tslib_1.__exportStar(__nccwpck_require__(10375), exports); +tslib_1.__exportStar(__nccwpck_require__(66894), exports); +tslib_1.__exportStar(__nccwpck_require__(57887), exports); +tslib_1.__exportStar(__nccwpck_require__(66255), exports); + + +/***/ }), + +/***/ 52866: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 17756: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SMITHY_CONTEXT_KEY = void 0; +exports.SMITHY_CONTEXT_KEY = "__smithy_context"; + + +/***/ }), + +/***/ 45489: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 26524: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IniSectionType = void 0; +var IniSectionType; +(function (IniSectionType) { + IniSectionType["PROFILE"] = "profile"; + IniSectionType["SSO_SESSION"] = "sso-session"; + IniSectionType["SERVICES"] = "services"; +})(IniSectionType = exports.IniSectionType || (exports.IniSectionType = {})); + + +/***/ }), + +/***/ 14603: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 83752: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 30774: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 14089: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 45678: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + 
+Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 69926: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9945: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 28564: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 61285: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 50364: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RequestHandlerProtocol = void 0; +var RequestHandlerProtocol; +(function (RequestHandlerProtocol) { + RequestHandlerProtocol["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol["TDS_8_0"] = "tds/8.0"; +})(RequestHandlerProtocol = exports.RequestHandlerProtocol || (exports.RequestHandlerProtocol = {})); + + +/***/ }), + +/***/ 69304: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 46098: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - request: () => request -}); -module.exports = __toCommonJS(dist_src_exports); -var import_endpoint = __nccwpck_require__(59440); -var import_universal_user_agent = __nccwpck_require__(45030); -// pkg/dist-src/version.js -var VERSION = "8.1.6"; +/***/ }), -// pkg/dist-src/is-plain-object.js -function isPlainObject(value) { - if (typeof value !== "object" || value === null) - return false; - if (Object.prototype.toString.call(value) !== "[object Object]") - return false; - const proto = Object.getPrototypeOf(value); - if (proto === null) - return true; - const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; - return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); -} +/***/ 10375: +/***/ ((__unused_webpack_module, exports) => { -// pkg/dist-src/fetch-wrapper.js -var import_request_error = __nccwpck_require__(10537); +"use strict"; -// pkg/dist-src/get-buffer-response.js -function getBufferResponse(response) { - return response.arrayBuffer(); -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -// pkg/dist-src/fetch-wrapper.js -function fetchWrapper(requestOptions) { - var _a, _b, _c; - const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; - const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; - if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - let headers = {}; - let status; - let url; - let { fetch } = globalThis; - if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { - fetch = requestOptions.request.fetch; - } - if (!fetch) { - throw new Error( - "fetch is not set. 
Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" - ); - } - return fetch(requestOptions.url, { - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, - // duplex must be set if request.body is ReadableStream or Async Iterables. - // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. - ...requestOptions.body && { duplex: "half" } - }).then(async (response) => { - url = response.url; - status = response.status; - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - if ("deprecation" in headers) { - const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); - const deprecationLink = matches && matches.pop(); - log.warn( - `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` - ); - } - if (status === 204 || status === 205) { - return; - } - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } - throw new import_request_error.RequestError(response.statusText, status, { - response: { - url, - status, - headers, - data: void 0 - }, - request: requestOptions - }); - } - if (status === 304) { - throw new import_request_error.RequestError("Not modified", status, { - response: { - url, - status, - headers, - data: await getResponseData(response) - }, - request: requestOptions - }); - } - if (status >= 400) { - const data = await getResponseData(response); - const error = new import_request_error.RequestError(toErrorMessage(data), status, { - response: { - url, - status, - headers, - data - }, - request: requestOptions - }); - throw error; - } - return parseSuccessResponseBody ? 
await getResponseData(response) : response.body; - }).then((data) => { - return { - status, - url, - headers, - data - }; - }).catch((error) => { - if (error instanceof import_request_error.RequestError) - throw error; - else if (error.name === "AbortError") - throw error; - let message = error.message; - if (error.name === "TypeError" && "cause" in error) { - if (error.cause instanceof Error) { - message = error.cause.message; - } else if (typeof error.cause === "string") { - message = error.cause; - } - } - throw new import_request_error.RequestError(message, 500, { - request: requestOptions - }); - }); -} -async function getResponseData(response) { - const contentType = response.headers.get("content-type"); - if (/application\/json/.test(contentType)) { - return response.json().catch(() => response.text()).catch(() => ""); - } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - return getBufferResponse(response); -} -function toErrorMessage(data) { - if (typeof data === "string") - return data; - if ("message" in data) { - if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; - } - return data.message; - } - return `Unknown error: ${JSON.stringify(data)}`; -} -// pkg/dist-src/with-defaults.js -function withDefaults(oldEndpoint, newDefaults) { - const endpoint2 = oldEndpoint.defaults(newDefaults); - const newApi = function(route, parameters) { - const endpointOptions = endpoint2.merge(route, parameters); - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint2.parse(endpointOptions)); - } - const request2 = (route2, parameters2) => { - return fetchWrapper( - endpoint2.parse(endpoint2.merge(route2, parameters2)) - ); - }; - Object.assign(request2, { - endpoint: endpoint2, - defaults: withDefaults.bind(null, endpoint2) - }); - return endpointOptions.request.hook(request2, endpointOptions); - }; - return Object.assign(newApi, { - endpoint: endpoint2, - defaults: withDefaults.bind(null, endpoint2) - }); -} +/***/ }), -// pkg/dist-src/index.js -var request = withDefaults(import_endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` - } -}); -// Annotate the CommonJS export names for ESM import in node: -0 && (0); +/***/ 66894: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 57171: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 57887: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ContextAPI = void 0; -const NoopContextManager_1 = __nccwpck_require__(54118); -const global_utils_1 = __nccwpck_require__(63979); -const diag_1 = __nccwpck_require__(11877); -const API_NAME = 'context'; -const NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); -/** - * Singleton object which represents the entry point to the OpenTelemetry Context API - */ -class ContextAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { } - /** Get the singleton instance of the Context API */ - static getInstance() { - if (!this._instance) { - this._instance = new ContextAPI(); - } - return this._instance; - } - /** - * Set the current context manager. - * - * @returns true if the context manager was successfully registered, else false - */ - setGlobalContextManager(contextManager) { - return (0, global_utils_1.registerGlobal)(API_NAME, contextManager, diag_1.DiagAPI.instance()); - } - /** - * Get the currently active context - */ - active() { - return this._getContextManager().active(); - } - /** - * Execute a function with an active context - * - * @param context context to be active during function execution - * @param fn function to execute in a context - * @param thisArg optional receiver to be used for calling fn - * @param args optional arguments forwarded to fn - */ - with(context, fn, thisArg, ...args) { - return this._getContextManager().with(context, fn, thisArg, ...args); - } - /** - * Bind a context to a target function or event emitter - * - * @param context context to bind to the event emitter or function. Defaults to the currently active context - * @param target function or event emitter to bind - */ - bind(context, target) { - return this._getContextManager().bind(context, target); - } - _getContextManager() { - return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_CONTEXT_MANAGER; - } - /** Disable and remove the global context manager */ - disable() { - this._getContextManager().disable(); - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); - } -} -exports.ContextAPI = ContextAPI; -//# sourceMappingURL=context.js.map + /***/ }), -/***/ 11877: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 66255: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DiagAPI = void 0; -const ComponentLogger_1 = __nccwpck_require__(17978); -const logLevelLogger_1 = __nccwpck_require__(99639); -const types_1 = __nccwpck_require__(78077); -const global_utils_1 = __nccwpck_require__(63979); -const API_NAME = 'diag'; -/** - * Singleton object which represents the entry point to the OpenTelemetry internal - * diagnostic API - */ -class DiagAPI { - /** - * Private internal constructor - * @private - */ - constructor() { - function _logProxy(funcName) { - return function (...args) { - const logger = (0, global_utils_1.getGlobal)('diag'); - // shortcut if logger not set - if (!logger) - return; - return logger[funcName](...args); - }; - } - // Using self local variable for minification purposes as 'this' cannot be minified - const self = this; - // DiagAPI specific functions - const setLogger = (logger, optionsOrLogLevel = { logLevel: types_1.DiagLogLevel.INFO }) => { - var _a, _b, _c; - if (logger === self) { - // There isn't much we can do here. - // Logging to the console might break the user application. - // Try to log to self. If a logger was previously registered it will receive the log. - const err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); - self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); - return false; - } - if (typeof optionsOrLogLevel === 'number') { - optionsOrLogLevel = { - logLevel: optionsOrLogLevel, - }; - } - const oldLogger = (0, global_utils_1.getGlobal)('diag'); - const newLogger = (0, logLevelLogger_1.createLogLevelDiagLogger)((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : types_1.DiagLogLevel.INFO, logger); - // There already is an logger registered. We'll let it know before overwriting it. - if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { - const stack = (_c = new Error().stack) !== null && _c !== void 0 ? _c : ''; - oldLogger.warn(`Current logger will be overwritten from ${stack}`); - newLogger.warn(`Current logger will overwrite one already registered from ${stack}`); - } - return (0, global_utils_1.registerGlobal)('diag', newLogger, self, true); - }; - self.setLogger = setLogger; - self.disable = () => { - (0, global_utils_1.unregisterGlobal)(API_NAME, self); - }; - self.createComponentLogger = (options) => { - return new ComponentLogger_1.DiagComponentLogger(options); - }; - self.verbose = _logProxy('verbose'); - self.debug = _logProxy('debug'); - self.info = _logProxy('info'); - self.warn = _logProxy('warn'); - self.error = _logProxy('error'); - } - /** Get the singleton instance of the DiagAPI API */ - static instance() { - if (!this._instance) { - this._instance = new DiagAPI(); - } - return this._instance; - } -} -exports.DiagAPI = DiagAPI; -//# sourceMappingURL=diag.js.map + /***/ }), -/***/ 17696: +/***/ 14681: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MetricsAPI = void 0; -const NoopMeterProvider_1 = __nccwpck_require__(72647); -const global_utils_1 = __nccwpck_require__(63979); -const diag_1 = __nccwpck_require__(11877); -const API_NAME = 'metrics'; -/** - * Singleton object which represents the entry point to the OpenTelemetry Metrics API - */ -class MetricsAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { } - /** Get the singleton instance of the Metrics API */ - static getInstance() { - if (!this._instance) { - this._instance = new MetricsAPI(); - } - return this._instance; - } - /** - * Set the current global meter provider. - * Returns true if the meter provider was successfully registered, else false. - */ - setGlobalMeterProvider(provider) { - return (0, global_utils_1.registerGlobal)(API_NAME, provider, diag_1.DiagAPI.instance()); - } - /** - * Returns the global meter provider. - */ - getMeterProvider() { - return (0, global_utils_1.getGlobal)(API_NAME) || NoopMeterProvider_1.NOOP_METER_PROVIDER; - } - /** - * Returns a meter from the global meter provider. - */ - getMeter(name, version, options) { - return this.getMeterProvider().getMeter(name, version, options); +exports.parseUrl = void 0; +const querystring_parser_1 = __nccwpck_require__(4769); +const parseUrl = (url) => { + if (typeof url === "string") { + return (0, exports.parseUrl)(new URL(url)); } - /** Remove the global meter provider */ - disable() { - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, querystring_parser_1.parseQueryString)(search); } -} -exports.MetricsAPI = MetricsAPI; -//# sourceMappingURL=metrics.js.map + return { + hostname, + port: port ? parseInt(port) : undefined, + protocol, + path: pathname, + query, + }; +}; +exports.parseUrl = parseUrl; + /***/ }), -/***/ 89909: +/***/ 30305: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PropagationAPI = void 0; -const global_utils_1 = __nccwpck_require__(63979); -const NoopTextMapPropagator_1 = __nccwpck_require__(72368); -const TextMapPropagator_1 = __nccwpck_require__(80865); -const context_helpers_1 = __nccwpck_require__(37682); -const utils_1 = __nccwpck_require__(28136); -const diag_1 = __nccwpck_require__(11877); -const API_NAME = 'propagation'; -const NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); -/** - * Singleton object which represents the entry point to the OpenTelemetry Propagation API - */ -class PropagationAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { - this.createBaggage = utils_1.createBaggage; - this.getBaggage = context_helpers_1.getBaggage; - this.getActiveBaggage = context_helpers_1.getActiveBaggage; - this.setBaggage = context_helpers_1.setBaggage; - this.deleteBaggage = context_helpers_1.deleteBaggage; - } - /** Get the singleton instance of the Propagator API */ - static getInstance() { - if (!this._instance) { - this._instance = new PropagationAPI(); - } - return this._instance; - } - /** - * Set the current propagator. - * - * @returns true if the propagator was successfully registered, else false - */ - setGlobalPropagator(propagator) { - return (0, global_utils_1.registerGlobal)(API_NAME, propagator, diag_1.DiagAPI.instance()); - } - /** - * Inject context into a carrier to be propagated inter-process - * - * @param context Context carrying tracing data to inject - * @param carrier carrier to inject context into - * @param setter Function used to set values on the carrier - */ - inject(context, carrier, setter = TextMapPropagator_1.defaultTextMapSetter) { - return this._getGlobalPropagator().inject(context, carrier, setter); - } - /** - * Extract context from a carrier - * - * @param context Context which the newly created context will inherit from - * @param carrier Carrier to extract context from - * @param getter Function used to extract keys from a carrier - */ - extract(context, carrier, getter = TextMapPropagator_1.defaultTextMapGetter) { - return this._getGlobalPropagator().extract(context, carrier, getter); - } - /** - * Return a list of all fields which may be used by the propagator. 
- */ - fields() { - return this._getGlobalPropagator().fields(); - } - /** Remove the global propagator */ - disable() { - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); +exports.fromBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); } - _getGlobalPropagator() { - return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); } -} -exports.PropagationAPI = PropagationAPI; -//# sourceMappingURL=propagation.js.map + const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; +exports.fromBase64 = fromBase64; + /***/ }), -/***/ 81539: +/***/ 75600: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TraceAPI = void 0; -const global_utils_1 = __nccwpck_require__(63979); -const ProxyTracerProvider_1 = __nccwpck_require__(2285); -const spancontext_utils_1 = __nccwpck_require__(49745); -const context_utils_1 = __nccwpck_require__(23326); -const diag_1 = __nccwpck_require__(11877); -const API_NAME = 'trace'; -/** - * Singleton object which represents the entry point to the OpenTelemetry Tracing API - */ -class TraceAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { - this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); - this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; - this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; - this.deleteSpan = context_utils_1.deleteSpan; - this.getSpan = context_utils_1.getSpan; - this.getActiveSpan = context_utils_1.getActiveSpan; - this.getSpanContext = context_utils_1.getSpanContext; - this.setSpan = context_utils_1.setSpan; - this.setSpanContext = context_utils_1.setSpanContext; - } - /** Get the singleton instance of the Trace API */ - static getInstance() { - if (!this._instance) { - this._instance = new TraceAPI(); - } - return this._instance; - } - /** - * Set the current global tracer. - * - * @returns true if the tracer provider was successfully registered, else false - */ - setGlobalTracerProvider(provider) { - const success = (0, global_utils_1.registerGlobal)(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); - if (success) { - this._proxyTracerProvider.setDelegate(provider); - } - return success; - } - /** - * Returns the global tracer provider. 
- */ - getTracerProvider() { - return (0, global_utils_1.getGlobal)(API_NAME) || this._proxyTracerProvider; - } - /** - * Returns a tracer from the global tracer provider. - */ - getTracer(name, version) { - return this.getTracerProvider().getTracer(name, version); - } - /** Remove the global tracer provider */ - disable() { - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); - this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); - } -} -exports.TraceAPI = TraceAPI; -//# sourceMappingURL=trace.js.map +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(30305), exports); +tslib_1.__exportStar(__nccwpck_require__(74730), exports); + /***/ }), -/***/ 37682: +/***/ 74730: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.deleteBaggage = exports.setBaggage = exports.getActiveBaggage = exports.getBaggage = void 0; -const context_1 = __nccwpck_require__(57171); -const context_2 = __nccwpck_require__(78242); -/** - * Baggage key - */ -const BAGGAGE_KEY = (0, context_2.createContextKey)('OpenTelemetry Baggage Key'); -/** - * Retrieve the current baggage from the given context - * - * @param {Context} Context that manage all context values - * @returns {Baggage} Extracted baggage from the context - */ -function getBaggage(context) { - return context.getValue(BAGGAGE_KEY) || undefined; -} -exports.getBaggage = getBaggage; -/** - * Retrieve the current baggage from the active/current context - * - * @returns {Baggage} Extracted baggage from the context - */ -function getActiveBaggage() { - return getBaggage(context_1.ContextAPI.getInstance().active()); -} -exports.getActiveBaggage = getActiveBaggage; -/** - * Store a baggage in the given context - * - * @param {Context} Context that manage all context values - * @param {Baggage} baggage that will be set in the actual context - */ -function setBaggage(context, baggage) { - return context.setValue(BAGGAGE_KEY, baggage); -} -exports.setBaggage = setBaggage; -/** - * Delete the baggage stored in the given context - * - * @param {Context} Context that manage all context values - */ -function deleteBaggage(context) { - return context.deleteValue(BAGGAGE_KEY); -} -exports.deleteBaggage = deleteBaggage; -//# sourceMappingURL=context-helpers.js.map +exports.toBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const toBase64 = (input) => (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +exports.toBase64 = toBase64; + /***/ }), -/***/ 84811: -/***/ ((__unused_webpack_module, exports) => { +/***/ 54880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the 
"License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BaggageImpl = void 0; -class BaggageImpl { - constructor(entries) { - this._entries = entries ? new Map(entries) : new Map(); +exports.calculateBodyLength = void 0; +const fs_1 = __nccwpck_require__(57147); +const calculateBodyLength = (body) => { + if (!body) { + return 0; } - getEntry(key) { - const entry = this._entries.get(key); - if (!entry) { - return undefined; - } - return Object.assign({}, entry); + if (typeof body === "string") { + return Buffer.from(body).length; } - getAllEntries() { - return Array.from(this._entries.entries()).map(([k, v]) => [k, v]); + else if (typeof body.byteLength === "number") { + return body.byteLength; } - setEntry(key, entry) { - const newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.set(key, entry); - return newBaggage; + else if (typeof body.size === "number") { + return body.size; } - removeEntry(key) { - const newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.delete(key); - return newBaggage; + else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; } - removeEntries(...keys) { - const newBaggage = new BaggageImpl(this._entries); - for (const key of keys) { - newBaggage._entries.delete(key); - } - return newBaggage; + else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return (0, fs_1.lstatSync)(body.path).size; } - clear() { - return new BaggageImpl(); + else if (typeof body.fd === "number") { + return (0, fs_1.fstatSync)(body.fd).size; } -} -exports.BaggageImpl = BaggageImpl; -//# sourceMappingURL=baggage-impl.js.map + throw new Error(`Body Length computation failed for ${body}`); +}; +exports.calculateBodyLength = calculateBodyLength; + /***/ }), -/***/ 23542: -/***/ ((__unused_webpack_module, exports) => { +/***/ 68075: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.baggageEntryMetadataSymbol = void 0; -/** - * Symbol used to make BaggageEntryMetadata an opaque type - */ -exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); -//# sourceMappingURL=symbol.js.map +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(54880), exports); + /***/ }), -/***/ 28136: +/***/ 31381: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; -const diag_1 = __nccwpck_require__(11877); -const baggage_impl_1 = __nccwpck_require__(84811); -const symbol_1 = __nccwpck_require__(23542); -const diag = diag_1.DiagAPI.instance(); -/** - * Create a new Baggage with optional entries - * - * @param entries An array of baggage entries the new baggage should contain - */ -function createBaggage(entries = {}) { - return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); -} -exports.createBaggage = createBaggage; -/** - * Create a serializable BaggageEntryMetadata object from a string. - * - * @param str string metadata. Format is currently not defined by the spec and has no special meaning. - * - */ -function baggageEntryMetadataFromString(str) { - if (typeof str !== 'string') { - diag.error(`Cannot create baggage metadata from unknown type: ${typeof str}`); - str = ''; +exports.fromString = exports.fromArrayBuffer = void 0; +const is_array_buffer_1 = __nccwpck_require__(10780); +const buffer_1 = __nccwpck_require__(14300); +const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!(0, is_array_buffer_1.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); } - return { - __TYPE__: symbol_1.baggageEntryMetadataSymbol, - toString() { - return str; - }, - }; -} -exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; -//# sourceMappingURL=utils.js.map + return buffer_1.Buffer.from(input, offset, length); +}; +exports.fromArrayBuffer = fromArrayBuffer; +const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? buffer_1.Buffer.from(input, encoding) : buffer_1.Buffer.from(input); +}; +exports.fromString = fromString; + /***/ }), -/***/ 7393: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 42491: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.context = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const context_1 = __nccwpck_require__(57171); -/** Entrypoint for context API */ -exports.context = context_1.ContextAPI.getInstance(); -//# sourceMappingURL=context-api.js.map +exports.booleanSelector = exports.SelectorType = void 0; +var SelectorType; +(function (SelectorType) { + SelectorType["ENV"] = "env"; + SelectorType["CONFIG"] = "shared config entry"; +})(SelectorType = exports.SelectorType || (exports.SelectorType = {})); +const booleanSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". Expected "true" or "false", got ${obj[key]}.`); +}; +exports.booleanSelector = booleanSelector; + /***/ }), -/***/ 54118: +/***/ 83375: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NoopContextManager = void 0; -const context_1 = __nccwpck_require__(78242); -class NoopContextManager { - active() { - return context_1.ROOT_CONTEXT; - } - with(_context, fn, thisArg, ...args) { - return fn.call(thisArg, ...args); - } - bind(_context, target) { - return target; - } - enable() { - return this; - } - disable() { - return this; - } -} -exports.NoopContextManager = NoopContextManager; -//# sourceMappingURL=NoopContextManager.js.map +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(42491), exports); + /***/ }), -/***/ 78242: +/***/ 56470: /***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ROOT_CONTEXT = exports.createContextKey = void 0; -/** Get a key to uniquely identify a context value */ -function createContextKey(description) { - // The specification states that for the same input, multiple calls should - // return different keys. Due to the nature of the JS dependency management - // system, this creates problems where multiple versions of some package - // could hold different keys for the same property. - // - // Therefore, we use Symbol.for which returns the same key for the same input. - return Symbol.for(description); -} -exports.createContextKey = createContextKey; -class BaseContext { - /** - * Construct a new context which inherits values from an optional parent context. - * - * @param parentContext a context from which to inherit values - */ - constructor(parentContext) { - // for minification - const self = this; - self._currentContext = parentContext ? new Map(parentContext) : new Map(); - self.getValue = (key) => self._currentContext.get(key); - self.setValue = (key, value) => { - const context = new BaseContext(self._currentContext); - context._currentContext.set(key, value); - return context; - }; - self.deleteValue = (key) => { - const context = new BaseContext(self._currentContext); - context._currentContext.delete(key); - return context; - }; - } -} -/** The root context is used as the default parent context when there is no active context */ -exports.ROOT_CONTEXT = new BaseContext(); -//# sourceMappingURL=context.js.map +exports.IMDS_REGION_PATH = exports.DEFAULTS_MODE_OPTIONS = exports.ENV_IMDS_DISABLED = exports.AWS_DEFAULT_REGION_ENV = exports.AWS_REGION_ENV = exports.AWS_EXECUTION_ENV = void 0; +exports.AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +exports.AWS_REGION_ENV = "AWS_REGION"; +exports.AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +exports.ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +exports.DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +exports.IMDS_REGION_PATH = "/latest/meta-data/placement/region"; + + +/***/ }), + +/***/ 15577: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_DEFAULTS_MODE_CONFIG_OPTIONS = void 0; +const AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +const AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +exports.NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy", +}; + /***/ }), -/***/ 39721: +/***/ 72429: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.diag = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const diag_1 = __nccwpck_require__(11877); -/** - * Entrypoint for Diag API. - * Defines Diagnostic handler used for internal diagnostic logging operations. - * The default provides a Noop DiagLogger implementation which may be changed via the - * diag.setLogger(logger: DiagLogger) function. - */ -exports.diag = diag_1.DiagAPI.instance(); -//# sourceMappingURL=diag-api.js.map +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(46217), exports); + /***/ }), -/***/ 17978: +/***/ 46217: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DiagComponentLogger = void 0; -const global_utils_1 = __nccwpck_require__(63979); -/** - * Component Logger which is meant to be used as part of any component which - * will add automatically additional namespace in front of the log message. - * It will then forward all message to global diag logger - * @example - * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); - * cLogger.debug('test'); - * // @opentelemetry/instrumentation-http test - */ -class DiagComponentLogger { - constructor(props) { - this._namespace = props.namespace || 'DiagComponentLogger'; - } - debug(...args) { - return logProxy('debug', this._namespace, args); - } - error(...args) { - return logProxy('error', this._namespace, args); - } - info(...args) { - return logProxy('info', this._namespace, args); +exports.resolveDefaultsModeConfig = void 0; +const config_resolver_1 = __nccwpck_require__(53098); +const credential_provider_imds_1 = __nccwpck_require__(7477); +const node_config_provider_1 = __nccwpck_require__(33461); +const property_provider_1 = __nccwpck_require__(79721); +const constants_1 = __nccwpck_require__(56470); +const defaultsModeConfig_1 = __nccwpck_require__(15577); +const resolveDefaultsModeConfig = ({ region = (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS), defaultsMode = (0, node_config_provider_1.loadConfig)(defaultsModeConfig_1.NODE_DEFAULTS_MODE_CONFIG_OPTIONS), } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? void 0 : mode.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? 
void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); } - warn(...args) { - return logProxy('warn', this._namespace, args); +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; +const resolveNodeDefaultsModeAuto = async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } + else { + return "cross-region"; + } } - verbose(...args) { - return logProxy('verbose', this._namespace, args); + return "standard"; +}; +const inferPhysicalRegion = async () => { + var _a; + if (process.env[constants_1.AWS_EXECUTION_ENV] && (process.env[constants_1.AWS_REGION_ENV] || process.env[constants_1.AWS_DEFAULT_REGION_ENV])) { + return (_a = process.env[constants_1.AWS_REGION_ENV]) !== null && _a !== void 0 ? _a : process.env[constants_1.AWS_DEFAULT_REGION_ENV]; } -} -exports.DiagComponentLogger = DiagComponentLogger; -function logProxy(funcName, namespace, args) { - const logger = (0, global_utils_1.getGlobal)('diag'); - // shortcut if logger not set - if (!logger) { - return; + if (!process.env[constants_1.ENV_IMDS_DISABLED]) { + try { + const endpoint = await (0, credential_provider_imds_1.getInstanceMetadataEndpoint)(); + return (await (0, credential_provider_imds_1.httpRequest)({ ...endpoint, path: constants_1.IMDS_REGION_PATH })).toString(); + } + catch (e) { + } } - args.unshift(namespace); - return logger[funcName](...args); -} -//# sourceMappingURL=ComponentLogger.js.map +}; + /***/ }), -/***/ 3041: +/***/ 71280: /***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DiagConsoleLogger = void 0; -const consoleMap = [ - { n: 'error', c: 'error' }, - { n: 'warn', c: 'warn' }, - { n: 'info', c: 'info' }, - { n: 'debug', c: 'debug' }, - { n: 'verbose', c: 'trace' }, -]; -/** - * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
- * If you want to limit the amount of logging to a specific level or lower use the - * {@link createLogLevelDiagLogger} - */ -class DiagConsoleLogger { - constructor() { - function _consoleFunc(funcName) { - return function (...args) { - if (console) { - // Some environments only expose the console when the F12 developer console is open - // eslint-disable-next-line no-console - let theFunc = console[funcName]; - if (typeof theFunc !== 'function') { - // Not all environments support all functions - // eslint-disable-next-line no-console - theFunc = console.log; - } - // One last final check - if (typeof theFunc === 'function') { - return theFunc.apply(console, args); - } - } - }; - } - for (let i = 0; i < consoleMap.length; i++) { - this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); - } - } -} -exports.DiagConsoleLogger = DiagConsoleLogger; -//# sourceMappingURL=consoleLogger.js.map +exports.debugId = void 0; +exports.debugId = "endpoints"; + /***/ }), -/***/ 99639: +/***/ 30540: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createLogLevelDiagLogger = void 0; -const types_1 = __nccwpck_require__(78077); -function createLogLevelDiagLogger(maxLevel, logger) { - if (maxLevel < types_1.DiagLogLevel.NONE) { - maxLevel = types_1.DiagLogLevel.NONE; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(71280), exports); +tslib_1.__exportStar(__nccwpck_require__(48927), exports); + + +/***/ }), + +/***/ 48927: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toDebugString = void 0; +function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; } - else if (maxLevel > types_1.DiagLogLevel.ALL) { - maxLevel = types_1.DiagLogLevel.ALL; + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; } - // In case the logger is null or undefined - logger = logger || {}; - function _filterFunc(funcName, theLevel) { - const theFunc = logger[funcName]; - if (typeof theFunc === 'function' && maxLevel >= theLevel) { - return theFunc.bind(logger); - } - return function () { }; + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; } - return { - error: _filterFunc('error', types_1.DiagLogLevel.ERROR), - warn: _filterFunc('warn', types_1.DiagLogLevel.WARN), - info: _filterFunc('info', types_1.DiagLogLevel.INFO), - debug: _filterFunc('debug', types_1.DiagLogLevel.DEBUG), - verbose: _filterFunc('verbose', types_1.DiagLogLevel.VERBOSE), - }; + return JSON.stringify(input, null, 2); } -exports.createLogLevelDiagLogger = createLogLevelDiagLogger; -//# sourceMappingURL=logLevelLogger.js.map +exports.toDebugString = toDebugString; + /***/ }), -/***/ 78077: -/***/ ((__unused_webpack_module, 
exports) => { +/***/ 45473: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DiagLogLevel = void 0; -/** - * Defines the available internal logging levels for the diagnostic logger, the numeric values - * of the levels are defined to match the original values from the initial LogLevel to avoid - * compatibility/migration issues for any implementation that assume the numeric ordering. - */ -var DiagLogLevel; -(function (DiagLogLevel) { - /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ - DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; - /** Identifies an error scenario */ - DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; - /** Identifies a warning scenario */ - DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; - /** General informational log message */ - DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; - /** General debug log message */ - DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; - /** - * Detailed trace level logging should only be used for development, should only be set - * in a development environment. - */ - DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; - /** Used to set the logging level to include all logging */ - DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; -})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {})); -//# sourceMappingURL=types.js.map +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(55402), exports); +tslib_1.__exportStar(__nccwpck_require__(55021), exports); +tslib_1.__exportStar(__nccwpck_require__(38824), exports); +tslib_1.__exportStar(__nccwpck_require__(78693), exports); +tslib_1.__exportStar(__nccwpck_require__(75442), exports); + /***/ }), -/***/ 65163: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 29132: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.trace = exports.propagation = exports.metrics = exports.diag = exports.context = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.createTraceState = exports.TraceFlags = exports.SpanStatusCode = exports.SpanKind = exports.SamplingDecision = exports.ProxyTracerProvider = exports.ProxyTracer = exports.defaultTextMapSetter = exports.defaultTextMapGetter = exports.ValueType = exports.createNoopMeter = exports.DiagLogLevel = exports.DiagConsoleLogger = exports.ROOT_CONTEXT = exports.createContextKey = exports.baggageEntryMetadataFromString = void 0; -var utils_1 = __nccwpck_require__(28136); -Object.defineProperty(exports, "baggageEntryMetadataFromString", ({ enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } })); -// Context APIs -var context_1 = __nccwpck_require__(78242); -Object.defineProperty(exports, "createContextKey", ({ enumerable: true, get: function () { return context_1.createContextKey; } })); -Object.defineProperty(exports, "ROOT_CONTEXT", ({ enumerable: true, get: function () { return context_1.ROOT_CONTEXT; } })); -// Diag APIs -var consoleLogger_1 = __nccwpck_require__(3041); -Object.defineProperty(exports, "DiagConsoleLogger", ({ enumerable: true, get: function () { return consoleLogger_1.DiagConsoleLogger; } })); -var types_1 = __nccwpck_require__(78077); -Object.defineProperty(exports, "DiagLogLevel", ({ enumerable: true, get: function () { return types_1.DiagLogLevel; } })); -// Metrics APIs -var NoopMeter_1 = __nccwpck_require__(4837); -Object.defineProperty(exports, "createNoopMeter", ({ enumerable: true, get: function () { return NoopMeter_1.createNoopMeter; } })); -var Metric_1 = __nccwpck_require__(89999); -Object.defineProperty(exports, "ValueType", ({ enumerable: true, get: function () { return Metric_1.ValueType; } })); -// Propagation APIs -var TextMapPropagator_1 = __nccwpck_require__(80865); -Object.defineProperty(exports, "defaultTextMapGetter", ({ enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapGetter; } })); -Object.defineProperty(exports, "defaultTextMapSetter", ({ enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapSetter; } })); -var ProxyTracer_1 = __nccwpck_require__(43503); -Object.defineProperty(exports, "ProxyTracer", ({ enumerable: true, get: function () { return ProxyTracer_1.ProxyTracer; } })); -var ProxyTracerProvider_1 = __nccwpck_require__(2285); -Object.defineProperty(exports, "ProxyTracerProvider", ({ enumerable: true, get: function () { return ProxyTracerProvider_1.ProxyTracerProvider; } })); -var SamplingResult_1 = __nccwpck_require__(33209); -Object.defineProperty(exports, "SamplingDecision", ({ enumerable: true, get: function () { return SamplingResult_1.SamplingDecision; } })); -var span_kind_1 = __nccwpck_require__(31424); -Object.defineProperty(exports, "SpanKind", ({ enumerable: true, get: function () { return span_kind_1.SpanKind; } })); -var status_1 = __nccwpck_require__(48845); -Object.defineProperty(exports, "SpanStatusCode", ({ enumerable: true, get: function () { return status_1.SpanStatusCode; } })); -var trace_flags_1 = __nccwpck_require__(26905); -Object.defineProperty(exports, "TraceFlags", ({ enumerable: true, get: function () { return trace_flags_1.TraceFlags; } })); -var utils_2 = __nccwpck_require__(32615); -Object.defineProperty(exports, 
"createTraceState", ({ enumerable: true, get: function () { return utils_2.createTraceState; } })); -var spancontext_utils_1 = __nccwpck_require__(49745); -Object.defineProperty(exports, "isSpanContextValid", ({ enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } })); -Object.defineProperty(exports, "isValidTraceId", ({ enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } })); -Object.defineProperty(exports, "isValidSpanId", ({ enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } })); -var invalid_span_constants_1 = __nccwpck_require__(91760); -Object.defineProperty(exports, "INVALID_SPANID", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPANID; } })); -Object.defineProperty(exports, "INVALID_TRACEID", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } })); -Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } })); -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const context_api_1 = __nccwpck_require__(7393); -Object.defineProperty(exports, "context", ({ enumerable: true, get: function () { return context_api_1.context; } })); -const diag_api_1 = __nccwpck_require__(39721); -Object.defineProperty(exports, "diag", ({ enumerable: true, get: function () { return diag_api_1.diag; } })); -const metrics_api_1 = __nccwpck_require__(72601); -Object.defineProperty(exports, "metrics", ({ enumerable: true, get: function () { return metrics_api_1.metrics; } })); -const propagation_api_1 = __nccwpck_require__(17591); -Object.defineProperty(exports, "propagation", ({ enumerable: true, get: function () { return propagation_api_1.propagation; } })); -const trace_api_1 = __nccwpck_require__(98989); -Object.defineProperty(exports, "trace", ({ enumerable: true, get: function () { return trace_api_1.trace; } })); -// Default export. -exports["default"] = { - context: context_api_1.context, - diag: diag_api_1.diag, - metrics: metrics_api_1.metrics, - propagation: propagation_api_1.propagation, - trace: trace_api_1.trace, -}; -//# sourceMappingURL=index.js.map +exports.booleanEquals = void 0; +const booleanEquals = (value1, value2) => value1 === value2; +exports.booleanEquals = booleanEquals; + /***/ }), -/***/ 63979: +/***/ 84624: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; -const platform_1 = __nccwpck_require__(99957); -const version_1 = __nccwpck_require__(98996); -const semver_1 = __nccwpck_require__(81522); -const major = version_1.VERSION.split('.')[0]; -const GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(`opentelemetry.js.api.${major}`); -const _global = platform_1._globalThis; -function registerGlobal(type, instance, diag, allowOverride = false) { - var _a; - const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { - version: version_1.VERSION, - }); - if (!allowOverride && api[type]) { - // already registered an API of this type - const err = new Error(`@opentelemetry/api: Attempted duplicate registration of API: ${type}`); - diag.error(err.stack || err.message); - return false; - } - if (api.version !== version_1.VERSION) { - // All registered APIs must be of the same version exactly - const err = new Error(`@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${version_1.VERSION}`); - diag.error(err.stack || err.message); - return false; - } - api[type] = instance; - diag.debug(`@opentelemetry/api: Registered a global for ${type} v${version_1.VERSION}.`); - return true; -} -exports.registerGlobal = registerGlobal; -function getGlobal(type) { - var _a, _b; - const globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; - if (!globalVersion || !(0, semver_1.isCompatible)(globalVersion)) { - return; +exports.getAttr = void 0; +const types_1 = __nccwpck_require__(75442); +const getAttrPathList_1 = __nccwpck_require__(91311); +const getAttr = (value, path) => (0, getAttrPathList_1.getAttrPathList)(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new types_1.EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); } - return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; -} -exports.getGlobal = getGlobal; -function unregisterGlobal(type, diag) { - diag.debug(`@opentelemetry/api: Unregistering a global for ${type} v${version_1.VERSION}.`); - const api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; - if (api) { - delete api[type]; + else if (Array.isArray(acc)) { + return acc[parseInt(index)]; } -} -exports.unregisterGlobal = unregisterGlobal; -//# sourceMappingURL=global-utils.js.map + return acc[index]; +}, value); +exports.getAttr = getAttr; + /***/ }), -/***/ 81522: +/***/ 91311: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCompatible = exports._makeCompatibilityCheck = void 0; -const version_1 = __nccwpck_require__(98996); -const re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; -/** - * Create a function to test an API version to see if it is compatible with the provided ownVersion. - * - * The returned function has the following semantics: - * - Exact match is always compatible - * - Major versions must match exactly - * - 1.x package cannot use global 2.x package - * - 2.x package cannot use global 1.x package - * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API - * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects - * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 - * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor - * - Patch and build tag differences are not considered at this time - * - * @param ownVersion version which should be checked against - */ -function _makeCompatibilityCheck(ownVersion) { - const acceptedVersions = new Set([ownVersion]); - const rejectedVersions = new Set(); - const myVersionMatch = ownVersion.match(re); - if (!myVersionMatch) { - // we cannot guarantee compatibility so we always return noop - return () => false; - } - const ownVersionParsed = { - major: +myVersionMatch[1], - minor: +myVersionMatch[2], - patch: +myVersionMatch[3], - prerelease: myVersionMatch[4], - }; - // if ownVersion has a prerelease tag, versions must match exactly - if (ownVersionParsed.prerelease != null) { - return function isExactmatch(globalVersion) { - return globalVersion === ownVersion; - }; - } - function _reject(v) { - rejectedVersions.add(v); - return false; - } - function _accept(v) { - acceptedVersions.add(v); - return true; - } - return function isCompatible(globalVersion) { - if (acceptedVersions.has(globalVersion)) { - return true; - } - if (rejectedVersions.has(globalVersion)) { - return false; - } - const globalVersionMatch = globalVersion.match(re); - if (!globalVersionMatch) { - // cannot parse other version - // we cannot guarantee compatibility so we always noop - return _reject(globalVersion); - } - const globalVersionParsed = { - major: +globalVersionMatch[1], - minor: +globalVersionMatch[2], - patch: +globalVersionMatch[3], - prerelease: globalVersionMatch[4], - }; - // if globalVersion has a prerelease tag, versions must match exactly - if (globalVersionParsed.prerelease != null) { - return _reject(globalVersion); - } - // major versions must match - if (ownVersionParsed.major !== globalVersionParsed.major) { - return _reject(globalVersion); - } - if (ownVersionParsed.major === 0) { - if (ownVersionParsed.minor === globalVersionParsed.minor && - ownVersionParsed.patch <= globalVersionParsed.patch) { - return _accept(globalVersion); +exports.getAttrPathList = void 0; +const types_1 = __nccwpck_require__(75442); +const getAttrPathList = (path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if (part.indexOf("]") !== part.length - 1) { + throw new types_1.EndpointError(`Path: '${path}' does not end with ']'`); } - return _reject(globalVersion); + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + 
throw new types_1.EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); } - if (ownVersionParsed.minor <= globalVersionParsed.minor) { - return _accept(globalVersion); + else { + pathList.push(part); } - return _reject(globalVersion); - }; -} -exports._makeCompatibilityCheck = _makeCompatibilityCheck; -/** - * Test an API version to see if it is compatible with this API. - * - * - Exact match is always compatible - * - Major versions must match exactly - * - 1.x package cannot use global 2.x package - * - 2.x package cannot use global 1.x package - * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API - * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects - * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 - * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor - * - Patch and build tag differences are not considered at this time - * - * @param version version of the API requesting an instance of the global API - */ -exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION); -//# sourceMappingURL=semver.js.map + } + return pathList; +}; +exports.getAttrPathList = getAttrPathList; + /***/ }), -/***/ 72601: +/***/ 36559: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.metrics = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const metrics_1 = __nccwpck_require__(17696); -/** Entrypoint for metrics API */ -exports.metrics = metrics_1.MetricsAPI.getInstance(); -//# sourceMappingURL=metrics-api.js.map +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(29132), exports); +tslib_1.__exportStar(__nccwpck_require__(84624), exports); +tslib_1.__exportStar(__nccwpck_require__(71231), exports); +tslib_1.__exportStar(__nccwpck_require__(55021), exports); +tslib_1.__exportStar(__nccwpck_require__(42249), exports); +tslib_1.__exportStar(__nccwpck_require__(84654), exports); +tslib_1.__exportStar(__nccwpck_require__(72512), exports); +tslib_1.__exportStar(__nccwpck_require__(49245), exports); +tslib_1.__exportStar(__nccwpck_require__(51482), exports); + /***/ }), -/***/ 89999: +/***/ 55402: /***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ValueType = void 0; -/** The Type of value. It describes how the data is reported. */ -var ValueType; -(function (ValueType) { - ValueType[ValueType["INT"] = 0] = "INT"; - ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE"; -})(ValueType = exports.ValueType || (exports.ValueType = {})); -//# sourceMappingURL=Metric.js.map +exports.isIpAddress = void 0; +const IP_V4_REGEX = new RegExp(`^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$`); +const isIpAddress = (value) => IP_V4_REGEX.test(value) || (value.startsWith("[") && value.endsWith("]")); +exports.isIpAddress = isIpAddress; + /***/ }), -/***/ 4837: +/***/ 71231: /***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createNoopMeter = exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = exports.NOOP_OBSERVABLE_GAUGE_METRIC = exports.NOOP_OBSERVABLE_COUNTER_METRIC = exports.NOOP_UP_DOWN_COUNTER_METRIC = exports.NOOP_HISTOGRAM_METRIC = exports.NOOP_COUNTER_METRIC = exports.NOOP_METER = exports.NoopObservableUpDownCounterMetric = exports.NoopObservableGaugeMetric = exports.NoopObservableCounterMetric = exports.NoopObservableMetric = exports.NoopHistogramMetric = exports.NoopUpDownCounterMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0; -/** - * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses - * constant NoopMetrics for all of its methods. 
- */ -class NoopMeter { - constructor() { } - /** - * @see {@link Meter.createHistogram} - */ - createHistogram(_name, _options) { - return exports.NOOP_HISTOGRAM_METRIC; - } - /** - * @see {@link Meter.createCounter} - */ - createCounter(_name, _options) { - return exports.NOOP_COUNTER_METRIC; - } - /** - * @see {@link Meter.createUpDownCounter} - */ - createUpDownCounter(_name, _options) { - return exports.NOOP_UP_DOWN_COUNTER_METRIC; - } - /** - * @see {@link Meter.createObservableGauge} - */ - createObservableGauge(_name, _options) { - return exports.NOOP_OBSERVABLE_GAUGE_METRIC; - } - /** - * @see {@link Meter.createObservableCounter} - */ - createObservableCounter(_name, _options) { - return exports.NOOP_OBSERVABLE_COUNTER_METRIC; - } - /** - * @see {@link Meter.createObservableUpDownCounter} - */ - createObservableUpDownCounter(_name, _options) { - return exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; - } - /** - * @see {@link Meter.addBatchObservableCallback} - */ - addBatchObservableCallback(_callback, _observables) { } - /** - * @see {@link Meter.removeBatchObservableCallback} - */ - removeBatchObservableCallback(_callback) { } -} -exports.NoopMeter = NoopMeter; -class NoopMetric { -} -exports.NoopMetric = NoopMetric; -class NoopCounterMetric extends NoopMetric { - add(_value, _attributes) { } -} -exports.NoopCounterMetric = NoopCounterMetric; -class NoopUpDownCounterMetric extends NoopMetric { - add(_value, _attributes) { } -} -exports.NoopUpDownCounterMetric = NoopUpDownCounterMetric; -class NoopHistogramMetric extends NoopMetric { - record(_value, _attributes) { } -} -exports.NoopHistogramMetric = NoopHistogramMetric; -class NoopObservableMetric { - addCallback(_callback) { } - removeCallback(_callback) { } -} -exports.NoopObservableMetric = NoopObservableMetric; -class NoopObservableCounterMetric extends NoopObservableMetric { -} -exports.NoopObservableCounterMetric = NoopObservableCounterMetric; -class NoopObservableGaugeMetric extends NoopObservableMetric { -} -exports.NoopObservableGaugeMetric = NoopObservableGaugeMetric; -class NoopObservableUpDownCounterMetric extends NoopObservableMetric { -} -exports.NoopObservableUpDownCounterMetric = NoopObservableUpDownCounterMetric; -exports.NOOP_METER = new NoopMeter(); -// Synchronous instruments -exports.NOOP_COUNTER_METRIC = new NoopCounterMetric(); -exports.NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); -exports.NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); -// Asynchronous instruments -exports.NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); -exports.NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); -exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); -/** - * Create a no-op Meter - */ -function createNoopMeter() { - return exports.NOOP_METER; -} -exports.createNoopMeter = createNoopMeter; -//# sourceMappingURL=NoopMeter.js.map +exports.isSet = void 0; +const isSet = (value) => value != null; +exports.isSet = isSet; + /***/ }), -/***/ 72647: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 55021: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NOOP_METER_PROVIDER = exports.NoopMeterProvider = void 0; -const NoopMeter_1 = __nccwpck_require__(4837); -/** - * An implementation of the {@link MeterProvider} which returns an impotent Meter - * for all calls to `getMeter` - */ -class NoopMeterProvider { - getMeter(_name, _version, _options) { - return NoopMeter_1.NOOP_METER; +exports.isValidHostLabel = void 0; +const VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +const isValidHostLabel = (value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); } -} -exports.NoopMeterProvider = NoopMeterProvider; -exports.NOOP_METER_PROVIDER = new NoopMeterProvider(); -//# sourceMappingURL=NoopMeterProvider.js.map - -/***/ }), - -/***/ 99957: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); + const labels = value.split("."); + for (const label of labels) { + if (!(0, exports.isValidHostLabel)(label)) { + return false; + } + } + return true; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -__exportStar(__nccwpck_require__(87200), exports); -//# sourceMappingURL=index.js.map +exports.isValidHostLabel = isValidHostLabel; + /***/ }), -/***/ 89406: +/***/ 42249: /***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports._globalThis = void 0; -/** only globals that common to node and browsers are allowed */ -// eslint-disable-next-line node/no-unsupported-features/es-builtins -exports._globalThis = typeof globalThis === 'object' ? globalThis : global; -//# sourceMappingURL=globalThis.js.map - -/***/ }), - -/***/ 87200: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; +exports.not = void 0; +const not = (value) => !value; +exports.not = not; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -__exportStar(__nccwpck_require__(89406), exports); -//# sourceMappingURL=index.js.map /***/ }), -/***/ 17591: +/***/ 84654: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.propagation = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const propagation_1 = __nccwpck_require__(89909); -/** Entrypoint for propagation API */ -exports.propagation = propagation_1.PropagationAPI.getInstance(); -//# sourceMappingURL=propagation-api.js.map +exports.parseURL = void 0; +const types_1 = __nccwpck_require__(55756); +const isIpAddress_1 = __nccwpck_require__(55402); +const DEFAULT_PORTS = { + [types_1.EndpointURLScheme.HTTP]: 80, + [types_1.EndpointURLScheme.HTTPS]: 443, +}; +const parseURL = (value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname, port, protocol = "", path = "", query = {} } = value; + const url = new URL(`${protocol}//${hostname}${port ? 
`:${port}` : ""}${path}`); + url.search = Object.entries(query) + .map(([k, v]) => `${k}=${v}`) + .join("&"); + return url; + } + return new URL(value); + } + catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(types_1.EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = (0, isIpAddress_1.isIpAddress)(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || + (typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`)); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, + isIp, + }; +}; +exports.parseURL = parseURL; + /***/ }), -/***/ 72368: +/***/ 72512: /***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NoopTextMapPropagator = void 0; -/** - * No-op implementations of {@link TextMapPropagator}. - */ -class NoopTextMapPropagator { - /** Noop inject function does nothing */ - inject(_context, _carrier) { } - /** Noop extract function does nothing and returns the input context */ - extract(context, _carrier) { - return context; - } - fields() { - return []; - } -} -exports.NoopTextMapPropagator = NoopTextMapPropagator; -//# sourceMappingURL=NoopTextMapPropagator.js.map +exports.stringEquals = void 0; +const stringEquals = (value1, value2) => value1 === value2; +exports.stringEquals = stringEquals; + /***/ }), -/***/ 80865: +/***/ 49245: /***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.defaultTextMapSetter = exports.defaultTextMapGetter = void 0; -exports.defaultTextMapGetter = { - get(carrier, key) { - if (carrier == null) { - return undefined; - } - return carrier[key]; - }, - keys(carrier) { - if (carrier == null) { - return []; - } - return Object.keys(carrier); - }, -}; -exports.defaultTextMapSetter = { - set(carrier, key, value) { - if (carrier == null) { - return; - } - carrier[key] = value; - }, +exports.substring = void 0; +const substring = (input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); }; -//# sourceMappingURL=TextMapPropagator.js.map - -/***/ }), - -/***/ 98989: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +exports.substring = substring; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.trace = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const trace_1 = __nccwpck_require__(81539); -/** Entrypoint for trace API */ -exports.trace = trace_1.TraceAPI.getInstance(); -//# sourceMappingURL=trace-api.js.map /***/ }), -/***/ 81462: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 51482: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NonRecordingSpan = void 0; -const invalid_span_constants_1 = __nccwpck_require__(91760); -/** - * The NonRecordingSpan is the default {@link Span} that is used when no Span - * implementation is available. All operations are no-op including context - * propagation. - */ -class NonRecordingSpan { - constructor(_spanContext = invalid_span_constants_1.INVALID_SPAN_CONTEXT) { - this._spanContext = _spanContext; - } - // Returns a SpanContext. 
- spanContext() { - return this._spanContext; - } - // By default does nothing - setAttribute(_key, _value) { - return this; - } - // By default does nothing - setAttributes(_attributes) { - return this; - } - // By default does nothing - addEvent(_name, _attributes) { - return this; - } - // By default does nothing - setStatus(_status) { - return this; - } - // By default does nothing - updateName(_name) { - return this; - } - // By default does nothing - end(_endTime) { } - // isRecording always returns false for NonRecordingSpan. - isRecording() { - return false; - } - // By default does nothing - recordException(_exception, _time) { } -} -exports.NonRecordingSpan = NonRecordingSpan; -//# sourceMappingURL=NonRecordingSpan.js.map +exports.uriEncode = void 0; +const uriEncode = (value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`); +exports.uriEncode = uriEncode; + /***/ }), -/***/ 17606: +/***/ 78693: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NoopTracer = void 0; -const context_1 = __nccwpck_require__(57171); -const context_utils_1 = __nccwpck_require__(23326); -const NonRecordingSpan_1 = __nccwpck_require__(81462); -const spancontext_utils_1 = __nccwpck_require__(49745); -const contextApi = context_1.ContextAPI.getInstance(); -/** - * No-op implementations of {@link Tracer}. - */ -class NoopTracer { - // startSpan starts a noop span. - startSpan(name, options, context = contextApi.active()) { - const root = Boolean(options === null || options === void 0 ? void 0 : options.root); - if (root) { - return new NonRecordingSpan_1.NonRecordingSpan(); - } - const parentFromContext = context && (0, context_utils_1.getSpanContext)(context); - if (isSpanContext(parentFromContext) && - (0, spancontext_utils_1.isSpanContextValid)(parentFromContext)) { - return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); - } - else { - return new NonRecordingSpan_1.NonRecordingSpan(); +exports.resolveEndpoint = void 0; +const debug_1 = __nccwpck_require__(30540); +const types_1 = __nccwpck_require__(75442); +const utils_1 = __nccwpck_require__(96871); +const resolveEndpoint = (ruleSetObject, options) => { + var _a, _b, _c, _d, _e, _f; + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? 
void 0 : _b.call(_a, `${debug_1.debugId} Initial EndpointParams: ${(0, debug_1.toDebugString)(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters) + .filter(([, v]) => v.default != null) + .map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = (_c = endpointParams[paramKey]) !== null && _c !== void 0 ? _c : paramDefaultValue; } } - startActiveSpan(name, arg2, arg3, arg4) { - let opts; - let ctx; - let fn; - if (arguments.length < 2) { - return; - } - else if (arguments.length === 2) { - fn = arg2; + const requiredParams = Object.entries(parameters) + .filter(([, v]) => v.required) + .map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new types_1.EndpointError(`Missing required parameter: '${requiredParam}'`); } - else if (arguments.length === 3) { - opts = arg2; - fn = arg3; + } + const endpoint = (0, utils_1.evaluateRules)(rules, { endpointParams, logger, referenceRecord: {} }); + if ((_d = options.endpointParams) === null || _d === void 0 ? void 0 : _d.Endpoint) { + try { + const givenEndpoint = new URL(options.endpointParams.Endpoint); + const { protocol, port } = givenEndpoint; + endpoint.url.protocol = protocol; + endpoint.url.port = port; } - else { - opts = arg2; - ctx = arg3; - fn = arg4; + catch (e) { } - const parentContext = ctx !== null && ctx !== void 0 ? ctx : contextApi.active(); - const span = this.startSpan(name, opts, parentContext); - const contextWithSpanSet = (0, context_utils_1.setSpan)(parentContext, span); - return contextApi.with(contextWithSpanSet, fn, undefined, span); } -} -exports.NoopTracer = NoopTracer; -function isSpanContext(spanContext) { - return (typeof spanContext === 'object' && - typeof spanContext['spanId'] === 'string' && - typeof spanContext['traceId'] === 'string' && - typeof spanContext['traceFlags'] === 'number'); -} -//# sourceMappingURL=NoopTracer.js.map + (_f = (_e = options.logger) === null || _e === void 0 ? void 0 : _e.debug) === null || _f === void 0 ? void 0 : _f.call(_e, `${debug_1.debugId} Resolved endpoint: ${(0, debug_1.toDebugString)(endpoint)}`); + return endpoint; +}; +exports.resolveEndpoint = resolveEndpoint; + /***/ }), -/***/ 23259: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 84213: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NoopTracerProvider = void 0; -const NoopTracer_1 = __nccwpck_require__(17606); -/** - * An implementation of the {@link TracerProvider} which returns an impotent - * Tracer for all calls to `getTracer`. - * - * All operations are no-op. 
- */ -class NoopTracerProvider { - getTracer(_name, _version, _options) { - return new NoopTracer_1.NoopTracer(); +exports.EndpointError = void 0; +class EndpointError extends Error { + constructor(message) { + super(message); + this.name = "EndpointError"; } } -exports.NoopTracerProvider = NoopTracerProvider; -//# sourceMappingURL=NoopTracerProvider.js.map +exports.EndpointError = EndpointError; + /***/ }), -/***/ 43503: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 34073: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ProxyTracer = void 0; -const NoopTracer_1 = __nccwpck_require__(17606); -const NOOP_TRACER = new NoopTracer_1.NoopTracer(); -/** - * Proxy tracer provided by the proxy tracer provider - */ -class ProxyTracer { - constructor(_provider, name, version, options) { - this._provider = _provider; - this.name = name; - this.version = version; - this.options = options; - } - startSpan(name, options, context) { - return this._getTracer().startSpan(name, options, context); - } - startActiveSpan(_name, _options, _context, _fn) { - const tracer = this._getTracer(); - return Reflect.apply(tracer.startActiveSpan, tracer, arguments); - } - /** - * Try to get a tracer from the proxy tracer provider. - * If the proxy tracer provider has no delegate, return a noop tracer. - */ - _getTracer() { - if (this._delegate) { - return this._delegate; - } - const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); - if (!tracer) { - return NOOP_TRACER; - } - this._delegate = tracer; - return this._delegate; - } -} -exports.ProxyTracer = ProxyTracer; -//# sourceMappingURL=ProxyTracer.js.map + /***/ }), -/***/ 2285: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 72533: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ProxyTracerProvider = void 0; -const ProxyTracer_1 = __nccwpck_require__(43503); -const NoopTracerProvider_1 = __nccwpck_require__(23259); -const NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); -/** - * Tracer provider which provides {@link ProxyTracer}s. 
- * - * Before a delegate is set, tracers provided are NoOp. - * When a delegate is set, traces are provided from the delegate. - * When a delegate is set after tracers have already been provided, - * all tracers already provided will use the provided delegate implementation. - */ -class ProxyTracerProvider { - /** - * Get a {@link ProxyTracer} - */ - getTracer(name, version, options) { - var _a; - return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version, options)); - } - getDelegate() { - var _a; - return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; - } - /** - * Set the delegate tracer provider - */ - setDelegate(delegate) { - this._delegate = delegate; - } - getDelegateTracer(name, version, options) { - var _a; - return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); - } -} -exports.ProxyTracerProvider = ProxyTracerProvider; -//# sourceMappingURL=ProxyTracerProvider.js.map + /***/ }), -/***/ 33209: +/***/ 63135: /***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SamplingDecision = void 0; -/** - * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. - * A sampling decision that determines how a {@link Span} will be recorded - * and collected. - */ -var SamplingDecision; -(function (SamplingDecision) { - /** - * `Span.isRecording() === false`, span will not be recorded and all events - * and attributes will be dropped. - */ - SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; - /** - * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} - * MUST NOT be set. - */ - SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; - /** - * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} - * MUST be set. - */ - SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; -})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {})); -//# sourceMappingURL=SamplingResult.js.map + /***/ }), -/***/ 23326: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 19136: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getActiveSpan = exports.getSpan = void 0; -const context_1 = __nccwpck_require__(78242); -const NonRecordingSpan_1 = __nccwpck_require__(81462); -const context_2 = __nccwpck_require__(57171); -/** - * span key - */ -const SPAN_KEY = (0, context_1.createContextKey)('OpenTelemetry Context Key SPAN'); -/** - * Return the span if one exists - * - * @param context context to get span from - */ -function getSpan(context) { - return context.getValue(SPAN_KEY) || undefined; -} -exports.getSpan = getSpan; -/** - * Gets the span from the current context, if one exists. - */ -function getActiveSpan() { - return getSpan(context_2.ContextAPI.getInstance().active()); -} -exports.getActiveSpan = getActiveSpan; -/** - * Set the span on a context - * - * @param context context to use as parent - * @param span span to set active - */ -function setSpan(context, span) { - return context.setValue(SPAN_KEY, span); -} -exports.setSpan = setSpan; -/** - * Remove current span stored in the context - * - * @param context context to delete span from - */ -function deleteSpan(context) { - return context.deleteValue(SPAN_KEY); -} -exports.deleteSpan = deleteSpan; -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context context to set active span on - * @param spanContext span context to be wrapped - */ -function setSpanContext(context, spanContext) { - return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext)); -} -exports.setSpanContext = setSpanContext; -/** - * Get the span context of the span if it exists. - * - * @param context context to get values from - */ -function getSpanContext(context) { - var _a; - return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.spanContext(); -} -exports.getSpanContext = getSpanContext; -//# sourceMappingURL=context-utils.js.map + /***/ }), -/***/ 62110: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 28344: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TraceStateImpl = void 0; -const tracestate_validators_1 = __nccwpck_require__(54864); -const MAX_TRACE_STATE_ITEMS = 32; -const MAX_TRACE_STATE_LEN = 512; -const LIST_MEMBERS_SEPARATOR = ','; -const LIST_MEMBER_KEY_VALUE_SPLITTER = '='; -/** - * TraceState must be a class and not a simple object type because of the spec - * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). - * - * Here is the list of allowed mutations: - * - New key-value pair should be added into the beginning of the list - * - The value of any key can be updated. 
Modified keys MUST be moved to the - * beginning of the list. - */ -class TraceStateImpl { - constructor(rawTraceState) { - this._internalState = new Map(); - if (rawTraceState) - this._parse(rawTraceState); - } - set(key, value) { - // TODO: Benchmark the different approaches(map vs list) and - // use the faster one. - const traceState = this._clone(); - if (traceState._internalState.has(key)) { - traceState._internalState.delete(key); - } - traceState._internalState.set(key, value); - return traceState; - } - unset(key) { - const traceState = this._clone(); - traceState._internalState.delete(key); - return traceState; - } - get(key) { - return this._internalState.get(key); - } - serialize() { - return this._keys() - .reduce((agg, key) => { - agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key)); - return agg; - }, []) - .join(LIST_MEMBERS_SEPARATOR); - } - _parse(rawTraceState) { - if (rawTraceState.length > MAX_TRACE_STATE_LEN) - return; - this._internalState = rawTraceState - .split(LIST_MEMBERS_SEPARATOR) - .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning - .reduce((agg, part) => { - const listMember = part.trim(); // Optional Whitespace (OWS) handling - const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); - if (i !== -1) { - const key = listMember.slice(0, i); - const value = listMember.slice(i + 1, part.length); - if ((0, tracestate_validators_1.validateKey)(key) && (0, tracestate_validators_1.validateValue)(value)) { - agg.set(key, value); - } - else { - // TODO: Consider to add warning log - } - } - return agg; - }, new Map()); - // Because of the reverse() requirement, trunc must be done after map is created - if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { - this._internalState = new Map(Array.from(this._internalState.entries()) - .reverse() // Use reverse same as original tracestate parse chain - .slice(0, MAX_TRACE_STATE_ITEMS)); - } - } - _keys() { - return Array.from(this._internalState.keys()).reverse(); - } - _clone() { - const traceState = new TraceStateImpl(); - traceState._internalState = new Map(this._internalState); - return traceState; - } -} -exports.TraceStateImpl = TraceStateImpl; -//# sourceMappingURL=tracestate-impl.js.map + /***/ }), -/***/ 54864: -/***/ ((__unused_webpack_module, exports) => { +/***/ 75442: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validateValue = exports.validateKey = void 0; -const VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; -const VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`; -const VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`; -const VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`); -const VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; -const INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; -/** - * Key is opaque string up to 256 characters printable. It MUST begin with a - * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, - * underscores _, dashes -, asterisks *, and forward slashes /. - * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the - * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. - * see https://www.w3.org/TR/trace-context/#key - */ -function validateKey(key) { - return VALID_KEY_REGEX.test(key); -} -exports.validateKey = validateKey; -/** - * Value is opaque string up to 256 characters printable ASCII RFC0020 - * characters (i.e., the range 0x20 to 0x7E) except comma , and =. - */ -function validateValue(value) { - return (VALID_VALUE_BASE_REGEX.test(value) && - !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); -} -exports.validateValue = validateValue; -//# sourceMappingURL=tracestate-validators.js.map +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(84213), exports); +tslib_1.__exportStar(__nccwpck_require__(34073), exports); +tslib_1.__exportStar(__nccwpck_require__(72533), exports); +tslib_1.__exportStar(__nccwpck_require__(63135), exports); +tslib_1.__exportStar(__nccwpck_require__(19136), exports); +tslib_1.__exportStar(__nccwpck_require__(28344), exports); +tslib_1.__exportStar(__nccwpck_require__(42535), exports); + /***/ }), -/***/ 32615: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 42535: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createTraceState = void 0; -const tracestate_impl_1 = __nccwpck_require__(62110); -function createTraceState(rawTraceState) { - return new tracestate_impl_1.TraceStateImpl(rawTraceState); -} -exports.createTraceState = createTraceState; -//# sourceMappingURL=utils.js.map + /***/ }), -/***/ 91760: +/***/ 66318: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; -const trace_flags_1 = __nccwpck_require__(26905); -exports.INVALID_SPANID = '0000000000000000'; -exports.INVALID_TRACEID = '00000000000000000000000000000000'; -exports.INVALID_SPAN_CONTEXT = { - traceId: exports.INVALID_TRACEID, - spanId: exports.INVALID_SPANID, - traceFlags: trace_flags_1.TraceFlags.NONE, +exports.callFunction = void 0; +const customEndpointFunctions_1 = __nccwpck_require__(38824); +const endpointFunctions_1 = __nccwpck_require__(70953); +const evaluateExpression_1 = __nccwpck_require__(91692); +const callFunction = ({ fn, argv }, options) => { + const evaluatedArgs = argv.map((arg) => ["boolean", "number"].includes(typeof arg) ? arg : (0, evaluateExpression_1.evaluateExpression)(arg, "arg", options)); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions_1.customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions_1.customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions_1.endpointFunctions[fn](...evaluatedArgs); }; -//# sourceMappingURL=invalid-span-constants.js.map +exports.callFunction = callFunction; + /***/ }), -/***/ 31424: +/***/ 38824: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SpanKind = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var SpanKind; -(function (SpanKind) { - /** Default value. Indicates that the span is used internally. */ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; - /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. - */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; - /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. - */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; - /** - * Indicates that the span describes producer sending a message to a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; - /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. 
- */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(SpanKind = exports.SpanKind || (exports.SpanKind = {})); -//# sourceMappingURL=span_kind.js.map +exports.customEndpointFunctions = void 0; +exports.customEndpointFunctions = {}; + /***/ }), -/***/ 49745: +/***/ 70953: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -const invalid_span_constants_1 = __nccwpck_require__(91760); -const NonRecordingSpan_1 = __nccwpck_require__(81462); -const VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; -const VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; -function isValidTraceId(traceId) { - return VALID_TRACEID_REGEX.test(traceId) && traceId !== invalid_span_constants_1.INVALID_TRACEID; -} -exports.isValidTraceId = isValidTraceId; -function isValidSpanId(spanId) { - return VALID_SPANID_REGEX.test(spanId) && spanId !== invalid_span_constants_1.INVALID_SPANID; -} -exports.isValidSpanId = isValidSpanId; -/** - * Returns true if this {@link SpanContext} is valid. - * @return true if this {@link SpanContext} is valid. - */ -function isSpanContextValid(spanContext) { - return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); -} -exports.isSpanContextValid = isSpanContextValid; -/** - * Wrap the given {@link SpanContext} in a new non-recording {@link Span} - * - * @param spanContext span context to be wrapped - * @returns a new non-recording {@link Span} with the provided context - */ -function wrapSpanContext(spanContext) { - return new NonRecordingSpan_1.NonRecordingSpan(spanContext); -} -exports.wrapSpanContext = wrapSpanContext; -//# sourceMappingURL=spancontext-utils.js.map +exports.endpointFunctions = void 0; +const lib_1 = __nccwpck_require__(36559); +exports.endpointFunctions = { + booleanEquals: lib_1.booleanEquals, + getAttr: lib_1.getAttr, + isSet: lib_1.isSet, + isValidHostLabel: lib_1.isValidHostLabel, + not: lib_1.not, + parseURL: lib_1.parseURL, + stringEquals: lib_1.stringEquals, + substring: lib_1.substring, + uriEncode: lib_1.uriEncode, +}; + /***/ }), -/***/ 48845: -/***/ ((__unused_webpack_module, exports) => { +/***/ 42138: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SpanStatusCode = void 0; -/** - * An enumeration of status codes. - */ -var SpanStatusCode; -(function (SpanStatusCode) { - /** - * The default status. - */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; - /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. - */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; - /** - * The operation contains an error. 
- */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(SpanStatusCode = exports.SpanStatusCode || (exports.SpanStatusCode = {})); -//# sourceMappingURL=status.js.map +exports.evaluateCondition = void 0; +const debug_1 = __nccwpck_require__(30540); +const types_1 = __nccwpck_require__(75442); +const callFunction_1 = __nccwpck_require__(66318); +const evaluateCondition = ({ assign, ...fnArgs }, options) => { + var _a, _b; + if (assign && assign in options.referenceRecord) { + throw new types_1.EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = (0, callFunction_1.callFunction)(fnArgs, options); + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, debug_1.debugId, `evaluateCondition: ${(0, debug_1.toDebugString)(fnArgs)} = ${(0, debug_1.toDebugString)(value)}`); + return { + result: value === "" ? true : !!value, + ...(assign != null && { toAssign: { name: assign, value } }), + }; +}; +exports.evaluateCondition = evaluateCondition; + /***/ }), -/***/ 26905: -/***/ ((__unused_webpack_module, exports) => { +/***/ 69584: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TraceFlags = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var TraceFlags; -(function (TraceFlags) { - /** Represents no flag set. */ - TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; - /** Bit to represent whether trace is sampled in trace flags. */ - TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; -})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {})); -//# sourceMappingURL=trace_flags.js.map +exports.evaluateConditions = void 0; +const debug_1 = __nccwpck_require__(30540); +const evaluateCondition_1 = __nccwpck_require__(42138); +const evaluateConditions = (conditions = [], options) => { + var _a, _b; + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = (0, evaluateCondition_1.evaluateCondition)(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord, + }, + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? 
void 0 : _b.call(_a, debug_1.debugId, `assign: ${toAssign.name} := ${(0, debug_1.toDebugString)(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}; +exports.evaluateConditions = evaluateConditions; + /***/ }), -/***/ 98996: -/***/ ((__unused_webpack_module, exports) => { +/***/ 14405: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.VERSION = void 0; -// this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.4.1'; -//# sourceMappingURL=version.js.map +exports.evaluateEndpointRule = void 0; +const debug_1 = __nccwpck_require__(30540); +const evaluateConditions_1 = __nccwpck_require__(69584); +const getEndpointHeaders_1 = __nccwpck_require__(57225); +const getEndpointProperties_1 = __nccwpck_require__(83067); +const getEndpointUrl_1 = __nccwpck_require__(25672); +const evaluateEndpointRule = (endpointRule, options) => { + var _a, _b; + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }; + const { url, properties, headers } = endpoint; + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? 
void 0 : _b.call(_a, debug_1.debugId, `Resolving endpoint from template: ${(0, debug_1.toDebugString)(endpoint)}`); + return { + ...(headers != undefined && { + headers: (0, getEndpointHeaders_1.getEndpointHeaders)(headers, endpointRuleOptions), + }), + ...(properties != undefined && { + properties: (0, getEndpointProperties_1.getEndpointProperties)(properties, endpointRuleOptions), + }), + url: (0, getEndpointUrl_1.getEndpointUrl)(url, endpointRuleOptions), + }; +}; +exports.evaluateEndpointRule = evaluateEndpointRule; + /***/ }), -/***/ 43779: +/***/ 57563: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = exports.DEFAULT_USE_DUALSTACK_ENDPOINT = exports.CONFIG_USE_DUALSTACK_ENDPOINT = exports.ENV_USE_DUALSTACK_ENDPOINT = void 0; -const util_config_provider_1 = __nccwpck_require__(83375); -exports.ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; -exports.CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; -exports.DEFAULT_USE_DUALSTACK_ENDPOINT = false; -exports.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { - environmentVariableSelector: (env) => (0, util_config_provider_1.booleanSelector)(env, exports.ENV_USE_DUALSTACK_ENDPOINT, util_config_provider_1.SelectorType.ENV), - configFileSelector: (profile) => (0, util_config_provider_1.booleanSelector)(profile, exports.CONFIG_USE_DUALSTACK_ENDPOINT, util_config_provider_1.SelectorType.CONFIG), - default: false, +exports.evaluateErrorRule = void 0; +const types_1 = __nccwpck_require__(75442); +const evaluateConditions_1 = __nccwpck_require__(69584); +const evaluateExpression_1 = __nccwpck_require__(91692); +const evaluateErrorRule = (errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); + if (!result) { + return; + } + throw new types_1.EndpointError((0, evaluateExpression_1.evaluateExpression)(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + })); }; +exports.evaluateErrorRule = evaluateErrorRule; /***/ }), -/***/ 17994: +/***/ 91692: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = exports.DEFAULT_USE_FIPS_ENDPOINT = exports.CONFIG_USE_FIPS_ENDPOINT = exports.ENV_USE_FIPS_ENDPOINT = void 0; -const util_config_provider_1 = __nccwpck_require__(83375); -exports.ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; -exports.CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; -exports.DEFAULT_USE_FIPS_ENDPOINT = false; -exports.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { - environmentVariableSelector: (env) => (0, util_config_provider_1.booleanSelector)(env, exports.ENV_USE_FIPS_ENDPOINT, util_config_provider_1.SelectorType.ENV), - configFileSelector: (profile) => (0, util_config_provider_1.booleanSelector)(profile, exports.CONFIG_USE_FIPS_ENDPOINT, util_config_provider_1.SelectorType.CONFIG), - default: false, +exports.evaluateExpression = void 0; +const types_1 = __nccwpck_require__(75442); +const callFunction_1 = __nccwpck_require__(66318); +const evaluateTemplate_1 = __nccwpck_require__(21922); +const getReferenceValue_1 = __nccwpck_require__(17142); +const evaluateExpression = (obj, keyName, options) => { + if (typeof obj === "string") { + return (0, 
evaluateTemplate_1.evaluateTemplate)(obj, options); + } + else if (obj["fn"]) { + return (0, callFunction_1.callFunction)(obj, options); + } + else if (obj["ref"]) { + return (0, getReferenceValue_1.getReferenceValue)(obj, options); + } + throw new types_1.EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); }; +exports.evaluateExpression = evaluateExpression; /***/ }), -/***/ 18421: +/***/ 48830: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(43779), exports); -tslib_1.__exportStar(__nccwpck_require__(17994), exports); -tslib_1.__exportStar(__nccwpck_require__(37432), exports); -tslib_1.__exportStar(__nccwpck_require__(61892), exports); +exports.evaluateRules = void 0; +const types_1 = __nccwpck_require__(75442); +const evaluateEndpointRule_1 = __nccwpck_require__(14405); +const evaluateErrorRule_1 = __nccwpck_require__(57563); +const evaluateTreeRule_1 = __nccwpck_require__(55085); +const evaluateRules = (rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = (0, evaluateEndpointRule_1.evaluateEndpointRule)(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else if (rule.type === "error") { + (0, evaluateErrorRule_1.evaluateErrorRule)(rule, options); + } + else if (rule.type === "tree") { + const endpointOrUndefined = (0, evaluateTreeRule_1.evaluateTreeRule)(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else { + throw new types_1.EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new types_1.EndpointError(`Rules evaluation failed`); +}; +exports.evaluateRules = evaluateRules; /***/ }), -/***/ 37432: +/***/ 21922: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveCustomEndpointsConfig = void 0; -const util_middleware_1 = __nccwpck_require__(2390); -const resolveCustomEndpointsConfig = (input) => { - var _a, _b; - const { endpoint, urlParser } = input; - return { - ...input, - tls: (_a = input.tls) !== null && _a !== void 0 ? _a : true, - endpoint: (0, util_middleware_1.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), - isCustomEndpoint: true, - useDualstackEndpoint: (0, util_middleware_1.normalizeProvider)((_b = input.useDualstackEndpoint) !== null && _b !== void 0 ? 
_b : false), +exports.evaluateTemplate = void 0; +const lib_1 = __nccwpck_require__(36559); +const evaluateTemplate = (template, options) => { + const evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord, }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push((0, lib_1.getAttr)(templateContext[refName], attrName)); + } + else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); }; -exports.resolveCustomEndpointsConfig = resolveCustomEndpointsConfig; +exports.evaluateTemplate = evaluateTemplate; /***/ }), -/***/ 61892: +/***/ 55085: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveEndpointsConfig = void 0; -const util_middleware_1 = __nccwpck_require__(2390); -const getEndpointFromRegion_1 = __nccwpck_require__(48570); -const resolveEndpointsConfig = (input) => { - var _a, _b; - const useDualstackEndpoint = (0, util_middleware_1.normalizeProvider)((_a = input.useDualstackEndpoint) !== null && _a !== void 0 ? _a : false); - const { endpoint, useFipsEndpoint, urlParser } = input; - return { - ...input, - tls: (_b = input.tls) !== null && _b !== void 0 ? _b : true, - endpoint: endpoint - ? (0, util_middleware_1.normalizeProvider)(typeof endpoint === "string" ? 
urlParser(endpoint) : endpoint) - : () => (0, getEndpointFromRegion_1.getEndpointFromRegion)({ ...input, useDualstackEndpoint, useFipsEndpoint }), - isCustomEndpoint: !!endpoint, - useDualstackEndpoint, - }; +exports.evaluateTreeRule = void 0; +const evaluateConditions_1 = __nccwpck_require__(69584); +const evaluateRules_1 = __nccwpck_require__(48830); +const evaluateTreeRule = (treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); + if (!result) { + return; + } + return (0, evaluateRules_1.evaluateRules)(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }); }; -exports.resolveEndpointsConfig = resolveEndpointsConfig; +exports.evaluateTreeRule = evaluateTreeRule; /***/ }), -/***/ 48570: -/***/ ((__unused_webpack_module, exports) => { +/***/ 57225: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEndpointFromRegion = void 0; -const getEndpointFromRegion = async (input) => { - var _a; - const { tls = true } = input; - const region = await input.region(); - const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); - if (!dnsHostRegex.test(region)) { - throw new Error("Invalid region in client config"); - } - const useDualstackEndpoint = await input.useDualstackEndpoint(); - const useFipsEndpoint = await input.useFipsEndpoint(); - const { hostname } = (_a = (await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint }))) !== null && _a !== void 0 ? _a : {}; - if (!hostname) { - throw new Error("Cannot resolve hostname from client config"); - } - return input.urlParser(`${tls ? 
"https:" : "http:"}//${hostname}`); -}; -exports.getEndpointFromRegion = getEndpointFromRegion; +exports.getEndpointHeaders = void 0; +const types_1 = __nccwpck_require__(75442); +const evaluateExpression_1 = __nccwpck_require__(91692); +const getEndpointHeaders = (headers, options) => Object.entries(headers).reduce((acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: headerVal.map((headerValEntry) => { + const processedExpr = (0, evaluateExpression_1.evaluateExpression)(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new types_1.EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }), +}), {}); +exports.getEndpointHeaders = getEndpointHeaders; /***/ }), -/***/ 53098: +/***/ 83067: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(18421), exports); -tslib_1.__exportStar(__nccwpck_require__(221), exports); -tslib_1.__exportStar(__nccwpck_require__(86985), exports); +exports.getEndpointProperties = void 0; +const getEndpointProperty_1 = __nccwpck_require__(26152); +const getEndpointProperties = (properties, options) => Object.entries(properties).reduce((acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: (0, getEndpointProperty_1.getEndpointProperty)(propertyVal, options), +}), {}); +exports.getEndpointProperties = getEndpointProperties; /***/ }), -/***/ 33898: -/***/ ((__unused_webpack_module, exports) => { +/***/ 26152: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NODE_REGION_CONFIG_FILE_OPTIONS = exports.NODE_REGION_CONFIG_OPTIONS = exports.REGION_INI_NAME = exports.REGION_ENV_NAME = void 0; -exports.REGION_ENV_NAME = "AWS_REGION"; -exports.REGION_INI_NAME = "region"; -exports.NODE_REGION_CONFIG_OPTIONS = { - environmentVariableSelector: (env) => env[exports.REGION_ENV_NAME], - configFileSelector: (profile) => profile[exports.REGION_INI_NAME], - default: () => { - throw new Error("Region is missing"); - }, -}; -exports.NODE_REGION_CONFIG_FILE_OPTIONS = { - preferredFile: "credentials", +exports.getEndpointProperty = void 0; +const types_1 = __nccwpck_require__(75442); +const evaluateTemplate_1 = __nccwpck_require__(21922); +const getEndpointProperties_1 = __nccwpck_require__(83067); +const getEndpointProperty = (property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => (0, exports.getEndpointProperty)(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return (0, evaluateTemplate_1.evaluateTemplate)(property, options); + case "object": + if (property === null) { + throw new types_1.EndpointError(`Unexpected endpoint property: ${property}`); + } + return (0, getEndpointProperties_1.getEndpointProperties)(property, options); + case "boolean": + return property; + default: + throw new types_1.EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } }; +exports.getEndpointProperty = getEndpointProperty; /***/ }), -/***/ 49506: +/***/ 25672: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getRealRegion = void 0; -const isFipsRegion_1 = __nccwpck_require__(43870); -const getRealRegion = 
(region) => (0, isFipsRegion_1.isFipsRegion)(region) - ? ["fips-aws-global", "aws-fips"].includes(region) - ? "us-east-1" - : region.replace(/fips-(dkr-|prod-)?|-fips/, "") - : region; -exports.getRealRegion = getRealRegion; +exports.getEndpointUrl = void 0; +const types_1 = __nccwpck_require__(75442); +const evaluateExpression_1 = __nccwpck_require__(91692); +const getEndpointUrl = (endpointUrl, options) => { + const expression = (0, evaluateExpression_1.evaluateExpression)(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } + catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new types_1.EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}; +exports.getEndpointUrl = getEndpointUrl; /***/ }), -/***/ 221: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 17142: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(33898), exports); -tslib_1.__exportStar(__nccwpck_require__(87065), exports); +exports.getReferenceValue = void 0; +const getReferenceValue = ({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord, + }; + return referenceRecord[ref]; +}; +exports.getReferenceValue = getReferenceValue; /***/ }), -/***/ 43870: -/***/ ((__unused_webpack_module, exports) => { +/***/ 96871: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isFipsRegion = void 0; -const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); -exports.isFipsRegion = isFipsRegion; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(38824), exports); +tslib_1.__exportStar(__nccwpck_require__(48830), exports); /***/ }), -/***/ 87065: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 45364: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveRegionConfig = void 0; -const getRealRegion_1 = __nccwpck_require__(49506); -const isFipsRegion_1 = __nccwpck_require__(43870); -const resolveRegionConfig = (input) => { - const { region, useFipsEndpoint } = input; - if (!region) { - throw new Error("Region is missing"); +exports.toHex = exports.fromHex = void 0; +const SHORT_TO_HEX = {}; +const HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; } - return { - ...input, - region: async () => { - if (typeof region === "string") { - return (0, getRealRegion_1.getRealRegion)(region); - } - const providedRegion = await region(); - return (0, getRealRegion_1.getRealRegion)(providedRegion); - }, - useFipsEndpoint: async () => { - const providedRegion = typeof region === "string" ? region : await region(); - if ((0, isFipsRegion_1.isFipsRegion)(providedRegion)) { - return true; - } - return typeof useFipsEndpoint !== "function" ? 
Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); - }, - }; -}; -exports.resolveRegionConfig = resolveRegionConfig; + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } + else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +exports.fromHex = fromHex; +function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} +exports.toHex = toHex; /***/ }), -/***/ 19814: -/***/ ((__unused_webpack_module, exports) => { +/***/ 85730: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSmithyContext = void 0; +const types_1 = __nccwpck_require__(55756); +const getSmithyContext = (context) => context[types_1.SMITHY_CONTEXT_KEY] || (context[types_1.SMITHY_CONTEXT_KEY] = {}); +exports.getSmithyContext = getSmithyContext; /***/ }), -/***/ 14832: -/***/ ((__unused_webpack_module, exports) => { +/***/ 2390: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(85730), exports); +tslib_1.__exportStar(__nccwpck_require__(80149), exports); /***/ }), -/***/ 99760: +/***/ 80149: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getHostnameFromVariants = void 0; -const getHostnameFromVariants = (variants = [], { useFipsEndpoint, useDualstackEndpoint }) => { - var _a; - return (_a = variants.find(({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack"))) === null || _a === void 0 ? void 0 : _a.hostname; +exports.normalizeProvider = void 0; +const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; }; -exports.getHostnameFromVariants = getHostnameFromVariants; +exports.normalizeProvider = normalizeProvider; /***/ }), -/***/ 77792: +/***/ 65053: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getRegionInfo = void 0; -const getHostnameFromVariants_1 = __nccwpck_require__(99760); -const getResolvedHostname_1 = __nccwpck_require__(1487); -const getResolvedPartition_1 = __nccwpck_require__(44441); -const getResolvedSigningRegion_1 = __nccwpck_require__(92281); -const getRegionInfo = (region, { useFipsEndpoint = false, useDualstackEndpoint = false, signingService, regionHash, partitionHash, }) => { - var _a, _b, _c, _d, _e, _f; - const partition = (0, getResolvedPartition_1.getResolvedPartition)(region, { partitionHash }); - const resolvedRegion = region in regionHash ? region : (_b = (_a = partitionHash[partition]) === null || _a === void 0 ? void 0 : _a.endpoint) !== null && _b !== void 0 ? 
_b : region; - const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; - const regionHostname = (0, getHostnameFromVariants_1.getHostnameFromVariants)((_c = regionHash[resolvedRegion]) === null || _c === void 0 ? void 0 : _c.variants, hostnameOptions); - const partitionHostname = (0, getHostnameFromVariants_1.getHostnameFromVariants)((_d = partitionHash[partition]) === null || _d === void 0 ? void 0 : _d.variants, hostnameOptions); - const hostname = (0, getResolvedHostname_1.getResolvedHostname)(resolvedRegion, { regionHostname, partitionHostname }); - if (hostname === undefined) { - throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); +exports.AdaptiveRetryStrategy = void 0; +const config_1 = __nccwpck_require__(93435); +const DefaultRateLimiter_1 = __nccwpck_require__(22234); +const StandardRetryStrategy_1 = __nccwpck_require__(48361); +class AdaptiveRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = config_1.RETRY_MODES.ADAPTIVE; + const { rateLimiter } = options !== null && options !== void 0 ? options : {}; + this.rateLimiter = rateLimiter !== null && rateLimiter !== void 0 ? rateLimiter : new DefaultRateLimiter_1.DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy_1.StandardRetryStrategy(maxAttemptsProvider); } - const signingRegion = (0, getResolvedSigningRegion_1.getResolvedSigningRegion)(hostname, { - signingRegion: (_e = regionHash[resolvedRegion]) === null || _e === void 0 ? void 0 : _e.signingRegion, - regionRegex: partitionHash[partition].regionRegex, - useFipsEndpoint, - }); - return { - partition, - signingService, - hostname, - ...(signingRegion && { signingRegion }), - ...(((_f = regionHash[resolvedRegion]) === null || _f === void 0 ? void 0 : _f.signingService) && { - signingService: regionHash[resolvedRegion].signingService, - }), - }; -}; -exports.getRegionInfo = getRegionInfo; + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +} +exports.AdaptiveRetryStrategy = AdaptiveRetryStrategy; /***/ }), -/***/ 1487: -/***/ ((__unused_webpack_module, exports) => { +/***/ 25689: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getResolvedHostname = void 0; -const getResolvedHostname = (resolvedRegion, { regionHostname, partitionHostname }) => regionHostname - ? regionHostname - : partitionHostname - ? partitionHostname.replace("{region}", resolvedRegion) - : undefined; -exports.getResolvedHostname = getResolvedHostname; +exports.ConfiguredRetryStrategy = void 0; +const constants_1 = __nccwpck_require__(66302); +const StandardRetryStrategy_1 = __nccwpck_require__(48361); +class ConfiguredRetryStrategy extends StandardRetryStrategy_1.StandardRetryStrategy { + constructor(maxAttempts, computeNextBackoffDelay = constants_1.DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } + else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +} +exports.ConfiguredRetryStrategy = ConfiguredRetryStrategy; /***/ }), -/***/ 44441: -/***/ ((__unused_webpack_module, exports) => { +/***/ 22234: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getResolvedPartition = void 0; -const getResolvedPartition = (region, { partitionHash }) => { var _a; return (_a = Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region))) !== null && _a !== void 0 ? _a : "aws"; }; -exports.getResolvedPartition = getResolvedPartition; +exports.DefaultRateLimiter = void 0; +const service_error_classification_1 = __nccwpck_require__(6375); +class DefaultRateLimiter { + constructor(options) { + var _a, _b, _c, _d, _e; + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = (_a = options === null || options === void 0 ? void 0 : options.beta) !== null && _a !== void 0 ? _a : 0.7; + this.minCapacity = (_b = options === null || options === void 0 ? void 0 : options.minCapacity) !== null && _b !== void 0 ? _b : 1; + this.minFillRate = (_c = options === null || options === void 0 ? void 0 : options.minFillRate) !== null && _c !== void 0 ? _c : 0.5; + this.scaleConstant = (_d = options === null || options === void 0 ? void 0 : options.scaleConstant) !== null && _d !== void 0 ? _d : 0.4; + this.smooth = (_e = options === null || options === void 0 ? void 0 : options.smooth) !== null && _e !== void 0 ? _e : 0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + getCurrentTimeInSeconds() { + return Date.now() / 1000; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = ((amount - this.currentCapacity) / this.fillRate) * 1000; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if ((0, service_error_classification_1.isThrottlingError)(response)) { + const rateToUse = !this.enabled ? 
this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } + else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow((this.lastMaxRate * (1 - this.beta)) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise(this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +} +exports.DefaultRateLimiter = DefaultRateLimiter; /***/ }), -/***/ 92281: -/***/ ((__unused_webpack_module, exports) => { +/***/ 48361: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getResolvedSigningRegion = void 0; -const getResolvedSigningRegion = (hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { - if (signingRegion) { - return signingRegion; +exports.StandardRetryStrategy = void 0; +const config_1 = __nccwpck_require__(93435); +const constants_1 = __nccwpck_require__(66302); +const defaultRetryBackoffStrategy_1 = __nccwpck_require__(21337); +const defaultRetryToken_1 = __nccwpck_require__(1127); +class StandardRetryStrategy { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = config_1.RETRY_MODES.STANDARD; + this.capacity = constants_1.INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = (0, defaultRetryBackoffStrategy_1.getDefaultRetryBackoffStrategy)(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts; } - else if (useFipsEndpoint) { - const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); - const regionRegexmatchArray = hostname.match(regionRegexJs); - if (regionRegexmatchArray) { - return regionRegexmatchArray[0].slice(1, -1); + async acquireInitialRetryToken(retryTokenScope) { + return (0, defaultRetryToken_1.createDefaultRetryToken)({ + retryDelay: constants_1.DEFAULT_RETRY_DELAY_BASE, + retryCount: 0, + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase(errorType === "THROTTLING" ? constants_1.THROTTLING_RETRY_DELAY_BASE : constants_1.DEFAULT_RETRY_DELAY_BASE); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint + ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) + : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return (0, defaultRetryToken_1.createDefaultRetryToken)({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost, + }); } + throw new Error("No retry token available"); } -}; -exports.getResolvedSigningRegion = getResolvedSigningRegion; + recordSuccess(token) { + var _a; + this.capacity = Math.max(constants_1.INITIAL_RETRY_TOKENS, this.capacity + ((_a = token.getRetryCost()) !== null && _a !== void 0 ? _a : constants_1.NO_RETRY_INCREMENT)); + } + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } + catch (error) { + console.warn(`Max attempts provider could not resolve. Using default of ${config_1.DEFAULT_MAX_ATTEMPTS}`); + return config_1.DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return (attempts < maxAttempts && + this.capacity >= this.getCapacityCost(errorInfo.errorType) && + this.isRetryableError(errorInfo.errorType)); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? 
constants_1.TIMEOUT_RETRY_COST : constants_1.RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +} +exports.StandardRetryStrategy = StandardRetryStrategy; /***/ }), -/***/ 86985: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 93435: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(19814), exports); -tslib_1.__exportStar(__nccwpck_require__(14832), exports); -tslib_1.__exportStar(__nccwpck_require__(77792), exports); +exports.DEFAULT_RETRY_MODE = exports.DEFAULT_MAX_ATTEMPTS = exports.RETRY_MODES = void 0; +var RETRY_MODES; +(function (RETRY_MODES) { + RETRY_MODES["STANDARD"] = "standard"; + RETRY_MODES["ADAPTIVE"] = "adaptive"; +})(RETRY_MODES = exports.RETRY_MODES || (exports.RETRY_MODES = {})); +exports.DEFAULT_MAX_ATTEMPTS = 3; +exports.DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; /***/ }), -/***/ 82518: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 66302: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSmithyContext = void 0; -const types_1 = __nccwpck_require__(55756); -const getSmithyContext = (context) => context[types_1.SMITHY_CONTEXT_KEY] || (context[types_1.SMITHY_CONTEXT_KEY] = {}); -exports.getSmithyContext = getSmithyContext; +exports.REQUEST_HEADER = exports.INVOCATION_ID_HEADER = exports.NO_RETRY_INCREMENT = exports.TIMEOUT_RETRY_COST = exports.RETRY_COST = exports.INITIAL_RETRY_TOKENS = exports.THROTTLING_RETRY_DELAY_BASE = exports.MAXIMUM_RETRY_DELAY = exports.DEFAULT_RETRY_DELAY_BASE = void 0; +exports.DEFAULT_RETRY_DELAY_BASE = 100; +exports.MAXIMUM_RETRY_DELAY = 20 * 1000; +exports.THROTTLING_RETRY_DELAY_BASE = 500; +exports.INITIAL_RETRY_TOKENS = 500; +exports.RETRY_COST = 5; +exports.TIMEOUT_RETRY_COST = 10; +exports.NO_RETRY_INCREMENT = 1; +exports.INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +exports.REQUEST_HEADER = "amz-sdk-request"; /***/ }), -/***/ 55829: +/***/ 21337: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(41822), exports); -tslib_1.__exportStar(__nccwpck_require__(60793), exports); -tslib_1.__exportStar(__nccwpck_require__(17633), exports); -tslib_1.__exportStar(__nccwpck_require__(82518), exports); -tslib_1.__exportStar(__nccwpck_require__(21843), exports); -tslib_1.__exportStar(__nccwpck_require__(54006), exports); +exports.getDefaultRetryBackoffStrategy = void 0; +const constants_1 = __nccwpck_require__(66302); +const getDefaultRetryBackoffStrategy = () => { + let delayBase = constants_1.DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = (attempts) => { + return Math.floor(Math.min(constants_1.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }; + const setDelayBase = (delay) => { + delayBase = delay; + }; + return { + computeNextBackoffDelay, + setDelayBase, + }; +}; +exports.getDefaultRetryBackoffStrategy = getDefaultRetryBackoffStrategy; /***/ }), -/***/ 19917: +/***/ 1127: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); 
-exports.getHttpAuthSchemeEndpointRuleSetPlugin = exports.httpAuthSchemeEndpointRuleSetMiddlewareOptions = void 0; -const middleware_endpoint_1 = __nccwpck_require__(82918); -const httpAuthSchemeMiddleware_1 = __nccwpck_require__(63910); -exports.httpAuthSchemeEndpointRuleSetMiddlewareOptions = { - step: "serialize", - tags: ["HTTP_AUTH_SCHEME"], - name: "httpAuthSchemeMiddleware", - override: true, - relation: "before", - toMiddleware: middleware_endpoint_1.endpointMiddlewareOptions.name, +exports.createDefaultRetryToken = void 0; +const constants_1 = __nccwpck_require__(66302); +const createDefaultRetryToken = ({ retryDelay, retryCount, retryCost, }) => { + const getRetryCount = () => retryCount; + const getRetryDelay = () => Math.min(constants_1.MAXIMUM_RETRY_DELAY, retryDelay); + const getRetryCost = () => retryCost; + return { + getRetryCount, + getRetryDelay, + getRetryCost, + }; }; -const getHttpAuthSchemeEndpointRuleSetPlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({ - applyToStack: (clientStack) => { - clientStack.addRelativeTo((0, httpAuthSchemeMiddleware_1.httpAuthSchemeMiddleware)(config, { - httpAuthSchemeParametersProvider, - identityProviderConfigProvider, - }), exports.httpAuthSchemeEndpointRuleSetMiddlewareOptions); - }, -}); -exports.getHttpAuthSchemeEndpointRuleSetPlugin = getHttpAuthSchemeEndpointRuleSetPlugin; +exports.createDefaultRetryToken = createDefaultRetryToken; /***/ }), -/***/ 70022: +/***/ 84902: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getHttpAuthSchemePlugin = exports.httpAuthSchemeMiddlewareOptions = void 0; -const middleware_serde_1 = __nccwpck_require__(81238); -const httpAuthSchemeMiddleware_1 = __nccwpck_require__(63910); -exports.httpAuthSchemeMiddlewareOptions = { - step: "serialize", - tags: ["HTTP_AUTH_SCHEME"], - name: "httpAuthSchemeMiddleware", - override: true, - relation: "before", - toMiddleware: middleware_serde_1.serializerMiddlewareOption.name, -}; -const getHttpAuthSchemePlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({ - applyToStack: (clientStack) => { - clientStack.addRelativeTo((0, httpAuthSchemeMiddleware_1.httpAuthSchemeMiddleware)(config, { - httpAuthSchemeParametersProvider, - identityProviderConfigProvider, - }), exports.httpAuthSchemeMiddlewareOptions); - }, -}); -exports.getHttpAuthSchemePlugin = getHttpAuthSchemePlugin; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(65053), exports); +tslib_1.__exportStar(__nccwpck_require__(25689), exports); +tslib_1.__exportStar(__nccwpck_require__(22234), exports); +tslib_1.__exportStar(__nccwpck_require__(48361), exports); +tslib_1.__exportStar(__nccwpck_require__(93435), exports); +tslib_1.__exportStar(__nccwpck_require__(66302), exports); +tslib_1.__exportStar(__nccwpck_require__(75427), exports); /***/ }), -/***/ 63910: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 75427: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.httpAuthSchemeMiddleware = void 0; -const types_1 = __nccwpck_require__(55756); -const util_middleware_1 = __nccwpck_require__(2390); -function convertHttpAuthSchemesToMap(httpAuthSchemes) { - const map = new Map(); - for (const scheme of httpAuthSchemes) { - map.set(scheme.schemeId, scheme); - } - return map; -} 
-const httpAuthSchemeMiddleware = (config, mwOptions) => (next, context) => async (args) => { - var _a; - const options = config.httpAuthSchemeProvider(await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input)); - const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); - const smithyContext = (0, util_middleware_1.getSmithyContext)(context); - const failureReasons = []; - for (const option of options) { - const scheme = authSchemes.get(option.schemeId); - if (!scheme) { - failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); - continue; - } - const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); - if (!identityProvider) { - failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); - continue; - } - const { identityProperties = {}, signingProperties = {} } = ((_a = option.propertiesExtractor) === null || _a === void 0 ? void 0 : _a.call(option, config, context)) || {}; - option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); - option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); - smithyContext.selectedHttpAuthScheme = { - httpAuthOption: option, - identity: await identityProvider(option.identityProperties), - signer: scheme.signer, - }; - break; - } - if (!smithyContext.selectedHttpAuthScheme) { - throw new Error(failureReasons.join("\n")); - } - return next(args); -}; -exports.httpAuthSchemeMiddleware = httpAuthSchemeMiddleware; /***/ }), -/***/ 41822: +/***/ 22094: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(63910), exports); -tslib_1.__exportStar(__nccwpck_require__(19917), exports); -tslib_1.__exportStar(__nccwpck_require__(70022), exports); +exports.Uint8ArrayBlobAdapter = void 0; +const transforms_1 = __nccwpck_require__(82098); +class Uint8ArrayBlobAdapter extends Uint8Array { + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return (0, transforms_1.transformFromString)(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + static mutate(source) { + Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype); + return source; + } + transformToString(encoding = "utf-8") { + return (0, transforms_1.transformToString)(this, encoding); + } +} +exports.Uint8ArrayBlobAdapter = Uint8ArrayBlobAdapter; /***/ }), -/***/ 96819: +/***/ 82098: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getHttpSigningPlugin = exports.httpSigningMiddlewareOptions = void 0; -const middleware_retry_1 = __nccwpck_require__(96039); -const httpSigningMiddleware_1 = __nccwpck_require__(22774); -exports.httpSigningMiddlewareOptions = { - step: "finalizeRequest", - tags: ["HTTP_SIGNING"], - name: "httpSigningMiddleware", - aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], - override: true, - relation: "after", - toMiddleware: middleware_retry_1.retryMiddlewareOptions.name, -}; -const getHttpSigningPlugin = (config) => ({ - applyToStack: (clientStack) => { - clientStack.addRelativeTo((0, 
httpSigningMiddleware_1.httpSigningMiddleware)(config), exports.httpSigningMiddlewareOptions); - }, -}); -exports.getHttpSigningPlugin = getHttpSigningPlugin; +exports.transformFromString = exports.transformToString = void 0; +const util_base64_1 = __nccwpck_require__(75600); +const util_utf8_1 = __nccwpck_require__(41895); +const Uint8ArrayBlobAdapter_1 = __nccwpck_require__(22094); +function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return (0, util_base64_1.toBase64)(payload); + } + return (0, util_utf8_1.toUtf8)(payload); +} +exports.transformToString = transformToString; +function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter_1.Uint8ArrayBlobAdapter.mutate((0, util_base64_1.fromBase64)(str)); + } + return Uint8ArrayBlobAdapter_1.Uint8ArrayBlobAdapter.mutate((0, util_utf8_1.fromUtf8)(str)); +} +exports.transformFromString = transformFromString; /***/ }), -/***/ 22774: +/***/ 23636: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.httpSigningMiddleware = void 0; -const protocol_http_1 = __nccwpck_require__(64418); -const types_1 = __nccwpck_require__(55756); -const util_middleware_1 = __nccwpck_require__(2390); -const defaultErrorHandler = (signingProperties) => (error) => { - throw error; -}; -const defaultSuccessHandler = (httpResponse, signingProperties) => { }; -const httpSigningMiddleware = (config) => (next, context) => async (args) => { - if (!protocol_http_1.HttpRequest.isInstance(args.request)) { - return next(args); - } - const smithyContext = (0, util_middleware_1.getSmithyContext)(context); - const scheme = smithyContext.selectedHttpAuthScheme; - if (!scheme) { - throw new Error(`No HttpAuthScheme was selected: unable to sign request`); - } - const { httpAuthOption: { signingProperties = {} }, identity, signer, } = scheme; - const output = await next({ - ...args, - request: await signer.sign(args.request, identity, signingProperties), - }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); - (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); - return output; +exports.getAwsChunkedEncodingStream = void 0; +const stream_1 = __nccwpck_require__(12781); +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; }; -exports.httpSigningMiddleware = httpSigningMiddleware; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; /***/ }), -/***/ 60793: +/***/ 96607: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(22774), exports); -tslib_1.__exportStar(__nccwpck_require__(96819), exports); +tslib_1.__exportStar(__nccwpck_require__(22094), exports); +tslib_1.__exportStar(__nccwpck_require__(23636), exports); +tslib_1.__exportStar(__nccwpck_require__(4515), exports); /***/ }), -/***/ 21843: -/***/ ((__unused_webpack_module, exports) => { +/***/ 4515: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.normalizeProvider = void 0; -const normalizeProvider = (input) => { - if (typeof input === "function") - return input; - const promisified = Promise.resolve(input); - return () => promisified; +exports.sdkStreamMixin = void 0; +const node_http_handler_1 = __nccwpck_require__(20258); +const util_buffer_from_1 = __nccwpck_require__(31381); +const stream_1 = __nccwpck_require__(12781); +const util_1 = __nccwpck_require__(73837); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!(stream instanceof stream_1.Readable)) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? 
void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, node_http_handler_1.streamCollector)(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new util_1.TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof stream_1.Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. Please make sure you are using Node.js >= 17.0.0, or polyfill is available."); + } + transformed = true; + return stream_1.Readable.toWeb(stream); + }, + }); }; -exports.normalizeProvider = normalizeProvider; +exports.sdkStreamMixin = sdkStreamMixin; /***/ }), -/***/ 54006: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 54197: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + escapeUri: () => escapeUri, + escapeUriPath: () => escapeUriPath +}); +module.exports = __toCommonJS(src_exports); + +// src/escape-uri.ts +var escapeUri = /* @__PURE__ */ __name((uri) => ( + // AWS percent-encodes some extra non-standard characters in a URI + encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode) +), "escapeUri"); +var hexEncode = /* @__PURE__ */ __name((c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`, "hexEncode"); + +// src/escape-uri-path.ts +var escapeUriPath = /* @__PURE__ */ __name((uri) => uri.split("/").map(escapeUri).join("/"), "escapeUriPath"); +// Annotate the CommonJS export names for ESM import in node: -"use strict"; +0 && (0); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.RequestBuilder = exports.requestBuilder = void 0; -const protocol_http_1 = __nccwpck_require__(64418); -const smithy_client_1 = __nccwpck_require__(63570); -function requestBuilder(input, context) { - return new RequestBuilder(input, context); -} -exports.requestBuilder = requestBuilder; -class RequestBuilder { - constructor(input, 
context) { - this.input = input; - this.context = context; - this.query = {}; - this.method = ""; - this.headers = {}; - this.path = ""; - this.body = null; - this.hostname = ""; - this.resolvePathStack = []; - } - async build() { - const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); - this.path = basePath; - for (const resolvePath of this.resolvePathStack) { - resolvePath(this.path); - } - return new protocol_http_1.HttpRequest({ - protocol, - hostname: this.hostname || hostname, - port, - method: this.method, - path: this.path, - query: this.query, - body: this.body, - headers: this.headers, - }); - } - hn(hostname) { - this.hostname = hostname; - return this; - } - bp(uriLabel) { - this.resolvePathStack.push((basePath) => { - this.path = `${(basePath === null || basePath === void 0 ? void 0 : basePath.endsWith("/")) ? basePath.slice(0, -1) : basePath || ""}` + uriLabel; - }); - return this; - } - p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { - this.resolvePathStack.push((path) => { - this.path = (0, smithy_client_1.resolvedPath)(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); - }); - return this; - } - h(headers) { - this.headers = headers; - return this; - } - q(query) { - this.query = query; - return this; - } - b(body) { - this.body = body; - return this; - } - m(method) { - this.method = method; - return this; - } -} -exports.RequestBuilder = RequestBuilder; /***/ }), -/***/ 50301: -/***/ ((__unused_webpack_module, exports) => { +/***/ 45917: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultIdentityProviderConfig = void 0; -class DefaultIdentityProviderConfig { - constructor(config) { - this.authSchemes = new Map(); - for (const [key, value] of Object.entries(config)) { - if (value !== undefined) { - this.authSchemes.set(key, value); - } - } - } - getIdentityProvider(schemeId) { - return this.authSchemes.get(schemeId); - } -} -exports.DefaultIdentityProviderConfig = DefaultIdentityProviderConfig; +exports.fromUtf8 = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const fromUtf8 = (input) => { + const buf = (0, util_buffer_from_1.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; +exports.fromUtf8 = fromUtf8; /***/ }), -/***/ 65906: +/***/ 41895: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpApiKeyAuthSigner = void 0; -const types_1 = __nccwpck_require__(55756); -class HttpApiKeyAuthSigner { - async sign(httpRequest, identity, signingProperties) { - if (!signingProperties) { - throw new Error("request could not be signed with `apiKey` since the `name` and `in` signer properties are missing"); - } - if (!signingProperties.name) { - throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); - } - if (!signingProperties.in) { - throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); - } - if (!identity.apiKey) { - throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); - } - const clonedRequest = httpRequest.clone(); - if (signingProperties.in === types_1.HttpApiKeyAuthLocation.QUERY) { - clonedRequest.query[signingProperties.name] = identity.apiKey; 
- } - else if (signingProperties.in === types_1.HttpApiKeyAuthLocation.HEADER) { - clonedRequest.headers[signingProperties.name] = signingProperties.scheme - ? `${signingProperties.scheme} ${identity.apiKey}` - : identity.apiKey; - } - else { - throw new Error("request can only be signed with `apiKey` locations `query` or `header`, " + - "but found: `" + - signingProperties.in + - "`"); - } - return clonedRequest; - } -} -exports.HttpApiKeyAuthSigner = HttpApiKeyAuthSigner; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(45917), exports); +tslib_1.__exportStar(__nccwpck_require__(95470), exports); +tslib_1.__exportStar(__nccwpck_require__(99960), exports); /***/ }), -/***/ 62414: -/***/ ((__unused_webpack_module, exports) => { +/***/ 95470: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpBearerAuthSigner = void 0; -class HttpBearerAuthSigner { - async sign(httpRequest, identity, signingProperties) { - const clonedRequest = httpRequest.clone(); - if (!identity.token) { - throw new Error("request could not be signed with `token` since the `token` is not defined"); - } - clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; - return clonedRequest; +exports.toUint8Array = void 0; +const fromUtf8_1 = __nccwpck_require__(45917); +const toUint8Array = (data) => { + if (typeof data === "string") { + return (0, fromUtf8_1.fromUtf8)(data); } -} -exports.HttpBearerAuthSigner = HttpBearerAuthSigner; + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; +exports.toUint8Array = toUint8Array; /***/ }), -/***/ 86734: +/***/ 99960: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(65906), exports); -tslib_1.__exportStar(__nccwpck_require__(62414), exports); -tslib_1.__exportStar(__nccwpck_require__(62818), exports); +exports.toUtf8 = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const toUtf8 = (input) => (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +exports.toUtf8 = toUtf8; /***/ }), -/***/ 62818: -/***/ ((__unused_webpack_module, exports) => { +/***/ 76991: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NoAuthSigner = void 0; -class NoAuthSigner { - async sign(httpRequest, identity, signingProperties) { - return httpRequest; +exports.createWaiter = void 0; +const poller_1 = __nccwpck_require__(39033); +const utils_1 = __nccwpck_require__(26000); +const waiter_1 = __nccwpck_require__(79089); +const abortTimeout = async (abortSignal) => { + return new Promise((resolve) => { + abortSignal.onabort = () => resolve({ state: waiter_1.WaiterState.ABORTED }); + }); +}; +const createWaiter = async (options, input, acceptorChecks) => { + const params = { + ...waiter_1.waiterServiceDefaults, + ...options, + }; + (0, utils_1.validateWaiterOptions)(params); + const exitConditions = [(0, poller_1.runPolling)(params, input, acceptorChecks)]; + if (options.abortController) { + exitConditions.push(abortTimeout(options.abortController.signal)); } -} -exports.NoAuthSigner = 
NoAuthSigner; + if (options.abortSignal) { + exitConditions.push(abortTimeout(options.abortSignal)); + } + return Promise.race(exitConditions); +}; +exports.createWaiter = createWaiter; /***/ }), -/***/ 17633: +/***/ 78011: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(50301), exports); -tslib_1.__exportStar(__nccwpck_require__(86734), exports); -tslib_1.__exportStar(__nccwpck_require__(69851), exports); +tslib_1.__exportStar(__nccwpck_require__(76991), exports); +tslib_1.__exportStar(__nccwpck_require__(79089), exports); /***/ }), -/***/ 69851: -/***/ ((__unused_webpack_module, exports) => { +/***/ 39033: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.memoizeIdentityProvider = exports.doesIdentityRequireRefresh = exports.isIdentityExpired = exports.EXPIRATION_MS = exports.createIsIdentityExpiredFunction = void 0; -const createIsIdentityExpiredFunction = (expirationMs) => (identity) => (0, exports.doesIdentityRequireRefresh)(identity) && identity.expiration.getTime() - Date.now() < expirationMs; -exports.createIsIdentityExpiredFunction = createIsIdentityExpiredFunction; -exports.EXPIRATION_MS = 300000; -exports.isIdentityExpired = (0, exports.createIsIdentityExpiredFunction)(exports.EXPIRATION_MS); -const doesIdentityRequireRefresh = (identity) => identity.expiration !== undefined; -exports.doesIdentityRequireRefresh = doesIdentityRequireRefresh; -const memoizeIdentityProvider = (provider, isExpired, requiresRefresh) => { - if (provider === undefined) { - return undefined; +exports.runPolling = void 0; +const sleep_1 = __nccwpck_require__(62380); +const waiter_1 = __nccwpck_require__(79089); +const exponentialBackoffWithJitter = (minDelay, maxDelay, attemptCeiling, attempt) => { + if (attempt > attemptCeiling) + return maxDelay; + const delay = minDelay * 2 ** (attempt - 1); + return randomInRange(minDelay, delay); +}; +const randomInRange = (min, max) => min + Math.random() * (max - min); +const runPolling = async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { + var _a; + const { state, reason } = await acceptorChecks(client, input); + if (state !== waiter_1.WaiterState.RETRY) { + return { state, reason }; } - const normalizedProvider = typeof provider !== "function" ? async () => Promise.resolve(provider) : provider; - let resolved; - let pending; - let hasResult; - let isConstant = false; - const coalesceProvider = async (options) => { - if (!pending) { - pending = normalizedProvider(options); + let currentAttempt = 1; + const waitUntil = Date.now() + maxWaitTime * 1000; + const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; + while (true) { + if (((_a = abortController === null || abortController === void 0 ? void 0 : abortController.signal) === null || _a === void 0 ? void 0 : _a.aborted) || (abortSignal === null || abortSignal === void 0 ? 
void 0 : abortSignal.aborted)) { + return { state: waiter_1.WaiterState.ABORTED }; } - try { - resolved = await pending; - hasResult = true; - isConstant = false; + const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); + if (Date.now() + delay * 1000 > waitUntil) { + return { state: waiter_1.WaiterState.TIMEOUT }; } - finally { - pending = undefined; + await (0, sleep_1.sleep)(delay); + const { state, reason } = await acceptorChecks(client, input); + if (state !== waiter_1.WaiterState.RETRY) { + return { state, reason }; } - return resolved; - }; - if (isExpired === undefined) { - return async (options) => { - if (!hasResult || (options === null || options === void 0 ? void 0 : options.forceRefresh)) { - resolved = await coalesceProvider(options); - } - return resolved; - }; + currentAttempt += 1; } - return async (options) => { - if (!hasResult || (options === null || options === void 0 ? void 0 : options.forceRefresh)) { - resolved = await coalesceProvider(options); - } - if (isConstant) { - return resolved; - } - if (!requiresRefresh(resolved)) { - isConstant = true; - return resolved; - } - if (isExpired(resolved)) { - await coalesceProvider(options); - return resolved; - } - return resolved; - }; }; -exports.memoizeIdentityProvider = memoizeIdentityProvider; +exports.runPolling = runPolling; /***/ }), -/***/ 18044: -/***/ ((__unused_webpack_module, exports) => { +/***/ 26000: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Endpoint = void 0; -var Endpoint; -(function (Endpoint) { - Endpoint["IPv4"] = "http://169.254.169.254"; - Endpoint["IPv6"] = "http://[fd00:ec2::254]"; -})(Endpoint = exports.Endpoint || (exports.Endpoint = {})); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(62380), exports); +tslib_1.__exportStar(__nccwpck_require__(6594), exports); /***/ }), -/***/ 57342: +/***/ 62380: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ENDPOINT_CONFIG_OPTIONS = exports.CONFIG_ENDPOINT_NAME = exports.ENV_ENDPOINT_NAME = void 0; -exports.ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; -exports.CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; -exports.ENDPOINT_CONFIG_OPTIONS = { - environmentVariableSelector: (env) => env[exports.ENV_ENDPOINT_NAME], - configFileSelector: (profile) => profile[exports.CONFIG_ENDPOINT_NAME], - default: undefined, +exports.sleep = void 0; +const sleep = (seconds) => { + return new Promise((resolve) => setTimeout(resolve, seconds * 1000)); }; +exports.sleep = sleep; /***/ }), -/***/ 80991: +/***/ 6594: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.EndpointMode = void 0; -var EndpointMode; -(function (EndpointMode) { - EndpointMode["IPv4"] = "IPv4"; - EndpointMode["IPv6"] = "IPv6"; -})(EndpointMode = exports.EndpointMode || (exports.EndpointMode = {})); +exports.validateWaiterOptions = void 0; +const validateWaiterOptions = (options) => { + if (options.maxWaitTime < 1) { + throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); + } + else if (options.minDelay < 1) { + throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); + } + else if (options.maxDelay < 1) { + throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); + } + else if 
(options.maxWaitTime <= options.minDelay) { + throw new Error(`WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } + else if (options.maxDelay < options.minDelay) { + throw new Error(`WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } +}; +exports.validateWaiterOptions = validateWaiterOptions; /***/ }), -/***/ 88337: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 79089: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ENDPOINT_MODE_CONFIG_OPTIONS = exports.CONFIG_ENDPOINT_MODE_NAME = exports.ENV_ENDPOINT_MODE_NAME = void 0; -const EndpointMode_1 = __nccwpck_require__(80991); -exports.ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; -exports.CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; -exports.ENDPOINT_MODE_CONFIG_OPTIONS = { - environmentVariableSelector: (env) => env[exports.ENV_ENDPOINT_MODE_NAME], - configFileSelector: (profile) => profile[exports.CONFIG_ENDPOINT_MODE_NAME], - default: EndpointMode_1.EndpointMode.IPv4, +exports.checkExceptions = exports.WaiterState = exports.waiterServiceDefaults = void 0; +exports.waiterServiceDefaults = { + minDelay: 2, + maxDelay: 120, +}; +var WaiterState; +(function (WaiterState) { + WaiterState["ABORTED"] = "ABORTED"; + WaiterState["FAILURE"] = "FAILURE"; + WaiterState["SUCCESS"] = "SUCCESS"; + WaiterState["RETRY"] = "RETRY"; + WaiterState["TIMEOUT"] = "TIMEOUT"; +})(WaiterState = exports.WaiterState || (exports.WaiterState = {})); +const checkExceptions = (result) => { + if (result.state === WaiterState.ABORTED) { + const abortError = new Error(`${JSON.stringify({ + ...result, + reason: "Request was aborted", + })}`); + abortError.name = "AbortError"; + throw abortError; + } + else if (result.state === WaiterState.TIMEOUT) { + const timeoutError = new Error(`${JSON.stringify({ + ...result, + reason: "Waiter has timed out", + })}`); + timeoutError.name = "TimeoutError"; + throw timeoutError; + } + else if (result.state !== WaiterState.SUCCESS) { + throw new Error(`${JSON.stringify({ result })}`); + } + return result; }; +exports.checkExceptions = checkExceptions; /***/ }), -/***/ 58232: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 81040: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.InstanceMetadataV1FallbackError = void 0; -const property_provider_1 = __nccwpck_require__(79721); -class InstanceMetadataV1FallbackError extends property_provider_1.CredentialsProviderError { - constructor(message, tryNextLink = true) { - super(message, tryNextLink); - this.tryNextLink = tryNextLink; - this.name = "InstanceMetadataV1FallbackError"; - Object.setPrototypeOf(this, InstanceMetadataV1FallbackError.prototype); - } +function once(emitter, name, { signal } = {}) { + return new Promise((resolve, reject) => { + function cleanup() { + signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + } + function onEvent(...args) { + cleanup(); + resolve(args); + } + function onError(err) { + cleanup(); + reject(err); + } + signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); } -exports.InstanceMetadataV1FallbackError = InstanceMetadataV1FallbackError; - +exports["default"] = once; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 89227: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 61659: +/***/ ((module, exports, __nccwpck_require__) => { "use strict"; +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ + Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromContainerMetadata = exports.ENV_CMDS_AUTH_TOKEN = exports.ENV_CMDS_RELATIVE_URI = exports.ENV_CMDS_FULL_URI = void 0; -const property_provider_1 = __nccwpck_require__(79721); -const url_1 = __nccwpck_require__(57310); -const httpRequest_1 = __nccwpck_require__(32199); -const ImdsCredentials_1 = __nccwpck_require__(6894); -const RemoteProviderInit_1 = __nccwpck_require__(98533); -const retry_1 = __nccwpck_require__(91351); -exports.ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; -exports.ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; -exports.ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; -const fromContainerMetadata = (init = {}) => { - const { timeout, maxRetries } = (0, RemoteProviderInit_1.providerConfigFromInit)(init); - return () => (0, retry_1.retry)(async () => { - const requestOptions = await getCmdsUri(); - const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); - if (!(0, ImdsCredentials_1.isImdsCredentials)(credsResponse)) { - throw new property_provider_1.CredentialsProviderError("Invalid response received from instance metadata service."); - } - return (0, ImdsCredentials_1.fromImdsCredentials)(credsResponse); - }, maxRetries); -}; -exports.fromContainerMetadata = fromContainerMetadata; -const requestFromEcsImds = async (timeout, options) => { - if (process.env[exports.ENV_CMDS_AUTH_TOKEN]) { - options.headers = { - ...options.headers, - Authorization: process.env[exports.ENV_CMDS_AUTH_TOKEN], - }; - } - const buffer = await (0, httpRequest_1.httpRequest)({ - ...options, - timeout, - }); - return buffer.toString(); -}; -const CMDS_IP = "169.254.170.2"; -const GREENGRASS_HOSTS = { - localhost: true, - "127.0.0.1": true, -}; -const GREENGRASS_PROTOCOLS = { - "http:": true, - "https:": true, -}; -const getCmdsUri = async () => { - if (process.env[exports.ENV_CMDS_RELATIVE_URI]) { - return { - hostname: CMDS_IP, - path: process.env[exports.ENV_CMDS_RELATIVE_URI], - }; + +var eventTargetShim = __nccwpck_require__(84697); + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. 
+ */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); } - if (process.env[exports.ENV_CMDS_FULL_URI]) { - const parsed = (0, url_1.parse)(process.env[exports.ENV_CMDS_FULL_URI]); - if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { - throw new property_provider_1.CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, false); - } - if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { - throw new property_provider_1.CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, false); + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); } - return { - ...parsed, - port: parsed.port ? parseInt(parsed.port, 10) : undefined, - }; + return aborted; } - throw new property_provider_1.CredentialsProviderError("The container metadata credential provider cannot be used unless" + - ` the ${exports.ENV_CMDS_RELATIVE_URI} or ${exports.ENV_CMDS_FULL_URI} environment` + - " variable is set", false); -}; +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports["default"] = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map /***/ }), -/***/ 52207: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 49690: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromInstanceMetadata = void 0; -const node_config_provider_1 = __nccwpck_require__(33461); -const property_provider_1 = __nccwpck_require__(79721); -const InstanceMetadataV1FallbackError_1 = __nccwpck_require__(58232); -const httpRequest_1 = __nccwpck_require__(32199); -const ImdsCredentials_1 = __nccwpck_require__(6894); -const RemoteProviderInit_1 = __nccwpck_require__(98533); -const retry_1 = __nccwpck_require__(91351); -const getInstanceMetadataEndpoint_1 = __nccwpck_require__(92460); -const staticStabilityProvider_1 = __nccwpck_require__(74035); -const IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; -const IMDS_TOKEN_PATH = "/latest/api/token"; -const AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; -const PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; -const X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; -const fromInstanceMetadata = (init = {}) => (0, staticStabilityProvider_1.staticStabilityProvider)(getInstanceImdsProvider(init), { logger: init.logger }); -exports.fromInstanceMetadata = fromInstanceMetadata; -const getInstanceImdsProvider = (init) => { - let disableFetchToken = false; - const { logger, profile } = init; - const { timeout, maxRetries } = (0, RemoteProviderInit_1.providerConfigFromInit)(init); - const getCredentials = async (maxRetries, options) => { - var _a; - const isImdsV1Fallback = disableFetchToken || ((_a = options.headers) === null || _a === void 0 ? 
void 0 : _a[X_AWS_EC2_METADATA_TOKEN]) == null; - if (isImdsV1Fallback) { - let fallbackBlockedFromProfile = false; - let fallbackBlockedFromProcessEnv = false; - const configValue = await (0, node_config_provider_1.loadConfig)({ - environmentVariableSelector: (env) => { - const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; - fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; - if (envValue === undefined) { - throw new property_provider_1.CredentialsProviderError(`${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`); - } - return fallbackBlockedFromProcessEnv; - }, - configFileSelector: (profile) => { - const profileValue = profile[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; - fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; - return fallbackBlockedFromProfile; - }, - default: false, - }, { - profile, - })(); - if (init.ec2MetadataV1Disabled || configValue) { - const causes = []; - if (init.ec2MetadataV1Disabled) - causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); - if (fallbackBlockedFromProfile) - causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); - if (fallbackBlockedFromProcessEnv) - causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); - throw new InstanceMetadataV1FallbackError_1.InstanceMetadataV1FallbackError(`AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join(", ")}].`); - } - } - const imdsProfile = (await (0, retry_1.retry)(async () => { - let profile; - try { - profile = await getProfile(options); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(82361); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const promisify_1 = __importDefault(__nccwpck_require__(66570)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; } - catch (err) { - if (err.statusCode === 401) { - disableFetchToken = false; - } - throw err; + else if (callback) { + opts = callback; } - return profile; - }, maxRetries)).trim(); - return (0, retry_1.retry)(async () => { - let creds; - try { - creds = await getCredentialsFromProfile(imdsProfile, options); + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; } - catch (err) { - if (err.statusCode === 401) { - disableFetchToken = false; - } - throw err; + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; } - return creds; - }, maxRetries); - }; - return async () => { - const endpoint = await (0, getInstanceMetadataEndpoint_1.getInstanceMetadataEndpoint)(); - if (disableFetchToken) { - logger === null || logger === void 0 ? void 0 : logger.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); - return getCredentials(maxRetries, { ...endpoint, timeout }); + return isSecureEndpoint() ? 443 : 80; } - else { - let token; - try { - token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; } - catch (error) { - if ((error === null || error === void 0 ? void 0 : error.statusCode) === 400) { - throw Object.assign(error, { - message: "EC2 Metadata token request returned error", + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. 
+ delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. + req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); } - else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { - disableFetchToken = true; + else { + this.promisifiedCallback = this.callback; } - logger === null || logger === void 0 ? 
void 0 : logger.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); - return getCredentials(maxRetries, { ...endpoint, timeout }); } - return getCredentials(maxRetries, { - ...endpoint, - headers: { - [X_AWS_EC2_METADATA_TOKEN]: token, - }, - timeout, - }); + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); } - }; -}; -const getMetadataToken = async (options) => (0, httpRequest_1.httpRequest)({ - ...options, - path: IMDS_TOKEN_PATH, - method: "PUT", - headers: { - "x-aws-ec2-metadata-token-ttl-seconds": "21600", - }, -}); -const getProfile = async (options) => (await (0, httpRequest_1.httpRequest)({ ...options, path: IMDS_PATH })).toString(); -const getCredentialsFromProfile = async (profile, options) => { - const credsResponse = JSON.parse((await (0, httpRequest_1.httpRequest)({ - ...options, - path: IMDS_PATH + profile, - })).toString()); - if (!(0, ImdsCredentials_1.isImdsCredentials)(credsResponse)) { - throw new property_provider_1.CredentialsProviderError("Invalid response received from instance metadata service."); } - return (0, ImdsCredentials_1.fromImdsCredentials)(credsResponse); -}; - + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 7477: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 66570: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getInstanceMetadataEndpoint = exports.httpRequest = void 0; -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(89227), exports); -tslib_1.__exportStar(__nccwpck_require__(52207), exports); -tslib_1.__exportStar(__nccwpck_require__(98533), exports); -tslib_1.__exportStar(__nccwpck_require__(45036), exports); -var httpRequest_1 = __nccwpck_require__(32199); -Object.defineProperty(exports, "httpRequest", ({ enumerable: true, get: function () { return httpRequest_1.httpRequest; } })); -var getInstanceMetadataEndpoint_1 = __nccwpck_require__(92460); -Object.defineProperty(exports, "getInstanceMetadataEndpoint", ({ enumerable: true, get: function () { return getInstanceMetadataEndpoint_1.getInstanceMetadataEndpoint; } })); - +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map /***/ }), -/***/ 6894: -/***/ ((__unused_webpack_module, exports) => { +/***/ 61546: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromImdsCredentials = exports.isImdsCredentials = void 0; -const isImdsCredentials = (arg) => 
Boolean(arg) && - typeof arg === "object" && - typeof arg.AccessKeyId === "string" && - typeof arg.SecretAccessKey === "string" && - typeof arg.Token === "string" && - typeof arg.Expiration === "string"; -exports.isImdsCredentials = isImdsCredentials; -const fromImdsCredentials = (creds) => ({ - accessKeyId: creds.AccessKeyId, - secretAccessKey: creds.SecretAccessKey, - sessionToken: creds.Token, - expiration: new Date(creds.Expiration), -}); -exports.fromImdsCredentials = fromImdsCredentials; +const arrify = value => { + if (value === null || value === undefined) { + return []; + } -/***/ }), + if (Array.isArray(value)) { + return value; + } -/***/ 98533: -/***/ ((__unused_webpack_module, exports) => { + if (typeof value === 'string') { + return [value]; + } -"use strict"; + if (typeof value[Symbol.iterator] === 'function') { + return [...value]; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.providerConfigFromInit = exports.DEFAULT_MAX_RETRIES = exports.DEFAULT_TIMEOUT = void 0; -exports.DEFAULT_TIMEOUT = 1000; -exports.DEFAULT_MAX_RETRIES = 0; -const providerConfigFromInit = ({ maxRetries = exports.DEFAULT_MAX_RETRIES, timeout = exports.DEFAULT_TIMEOUT, }) => ({ maxRetries, timeout }); -exports.providerConfigFromInit = providerConfigFromInit; + return [value]; +}; + +module.exports = arrify; /***/ }), -/***/ 32199: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 33415: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +// Packages +var retrier = __nccwpck_require__(71604); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.httpRequest = void 0; -const property_provider_1 = __nccwpck_require__(79721); -const buffer_1 = __nccwpck_require__(14300); -const http_1 = __nccwpck_require__(13685); -function httpRequest(options) { - return new Promise((resolve, reject) => { - var _a; - const req = (0, http_1.request)({ - method: "GET", - ...options, - hostname: (_a = options.hostname) === null || _a === void 0 ? 
void 0 : _a.replace(/^\[(.+)\]$/, "$1"), - }); - req.on("error", (err) => { - reject(Object.assign(new property_provider_1.ProviderError("Unable to connect to instance metadata service"), err)); - req.destroy(); - }); - req.on("timeout", () => { - reject(new property_provider_1.ProviderError("TimeoutError from instance metadata service")); - req.destroy(); - }); - req.on("response", (res) => { - const { statusCode = 400 } = res; - if (statusCode < 200 || 300 <= statusCode) { - reject(Object.assign(new property_provider_1.ProviderError("Error response received from instance metadata service"), { statusCode })); - req.destroy(); - } - const chunks = []; - res.on("data", (chunk) => { - chunks.push(chunk); - }); - res.on("end", () => { - resolve(buffer_1.Buffer.concat(chunks)); - req.destroy(); - }); - }); - req.end(); - }); -} -exports.httpRequest = httpRequest; +function retry(fn, opts) { + function run(resolve, reject) { + var options = opts || {}; + var op; + // Default `randomize` to true + if (!('randomize' in options)) { + options.randomize = true; + } -/***/ }), + op = retrier.operation(options); -/***/ 91351: -/***/ ((__unused_webpack_module, exports) => { + // We allow the user to abort retrying + // this makes sense in the cases where + // knowledge is obtained that retrying + // would be futile (e.g.: auth errors) -"use strict"; + function bail(err) { + reject(err || new Error('Aborted')); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.retry = void 0; -const retry = (toRetry, maxRetries) => { - let promise = toRetry(); - for (let i = 0; i < maxRetries; i++) { - promise = promise.catch(toRetry); + function onError(err, num) { + if (err.bail) { + bail(err); + return; + } + + if (!op.retry(err)) { + reject(op.mainError()); + } else if (options.onRetry) { + options.onRetry(err, num); + } } - return promise; -}; -exports.retry = retry; + function runAttempt(num) { + var val; -/***/ }), + try { + val = fn(bail, num); + } catch (err) { + onError(err, num); + return; + } -/***/ 45036: -/***/ ((__unused_webpack_module, exports) => { + Promise.resolve(val) + .then(resolve) + .catch(function catchIt(err) { + onError(err, num); + }); + } -"use strict"; + op.attempt(runAttempt); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); + return new Promise(run); +} + +module.exports = retry; /***/ }), -/***/ 22666: +/***/ 26463: /***/ ((__unused_webpack_module, exports) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getExtendedInstanceMetadataCredentials = void 0; -const STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; -const STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; -const STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; -const getExtendedInstanceMetadataCredentials = (credentials, logger) => { - var _a; - const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + - Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); - const newExpiration = new Date(Date.now() + refreshInterval * 1000); - logger.warn("Attempting credential expiration extension due to a credential service availability issue. A refresh of these " + - "credentials will be attempted after ${new Date(newExpiration)}.\nFor more information, please visit: " + - STATIC_STABILITY_DOC_URL); - const originalExpiration = (_a = credentials.originalExpiration) !== null && _a !== void 0 ? 
_a : credentials.expiration; - return { - ...credentials, - ...(originalExpiration ? { originalExpiration } : {}), - expiration: newExpiration, - }; -}; -exports.getExtendedInstanceMetadataCredentials = getExtendedInstanceMetadataCredentials; +exports.byteLength = byteLength +exports.toByteArray = toByteArray +exports.fromByteArray = fromByteArray -/***/ }), +var lookup = [] +var revLookup = [] +var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array -/***/ 92460: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' +for (var i = 0, len = code.length; i < len; ++i) { + lookup[i] = code[i] + revLookup[code.charCodeAt(i)] = i +} -"use strict"; +// Support decoding URL-safe base64 strings, as Node.js does. +// See: https://en.wikipedia.org/wiki/Base64#URL_applications +revLookup['-'.charCodeAt(0)] = 62 +revLookup['_'.charCodeAt(0)] = 63 -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getInstanceMetadataEndpoint = void 0; -const node_config_provider_1 = __nccwpck_require__(33461); -const url_parser_1 = __nccwpck_require__(14681); -const Endpoint_1 = __nccwpck_require__(18044); -const EndpointConfigOptions_1 = __nccwpck_require__(57342); -const EndpointMode_1 = __nccwpck_require__(80991); -const EndpointModeConfigOptions_1 = __nccwpck_require__(88337); -const getInstanceMetadataEndpoint = async () => (0, url_parser_1.parseUrl)((await getFromEndpointConfig()) || (await getFromEndpointModeConfig())); -exports.getInstanceMetadataEndpoint = getInstanceMetadataEndpoint; -const getFromEndpointConfig = async () => (0, node_config_provider_1.loadConfig)(EndpointConfigOptions_1.ENDPOINT_CONFIG_OPTIONS)(); -const getFromEndpointModeConfig = async () => { - const endpointMode = await (0, node_config_provider_1.loadConfig)(EndpointModeConfigOptions_1.ENDPOINT_MODE_CONFIG_OPTIONS)(); - switch (endpointMode) { - case EndpointMode_1.EndpointMode.IPv4: - return Endpoint_1.Endpoint.IPv4; - case EndpointMode_1.EndpointMode.IPv6: - return Endpoint_1.Endpoint.IPv6; - default: - throw new Error(`Unsupported endpoint mode: ${endpointMode}.` + ` Select from ${Object.values(EndpointMode_1.EndpointMode)}`); - } -}; +function getLens (b64) { + var len = b64.length + if (len % 4 > 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } -/***/ }), + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len -/***/ 74035: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var placeHoldersLen = validLen === len + ? 0 + : 4 - (validLen % 4) -"use strict"; + return [validLen, placeHoldersLen] +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.staticStabilityProvider = void 0; -const getExtendedInstanceMetadataCredentials_1 = __nccwpck_require__(22666); -const staticStabilityProvider = (provider, options = {}) => { - const logger = (options === null || options === void 0 ? 
void 0 : options.logger) || console; - let pastCredentials; - return async () => { - let credentials; - try { - credentials = await provider(); - if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { - credentials = (0, getExtendedInstanceMetadataCredentials_1.getExtendedInstanceMetadataCredentials)(credentials, logger); - } - } - catch (e) { - if (pastCredentials) { - logger.warn("Credential renew failed: ", e); - credentials = (0, getExtendedInstanceMetadataCredentials_1.getExtendedInstanceMetadataCredentials)(pastCredentials, logger); - } - else { - throw e; - } - } - pastCredentials = credentials; - return credentials; - }; -}; -exports.staticStabilityProvider = staticStabilityProvider; +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} -/***/ }), +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] -/***/ 11014: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) -"use strict"; + var curByte = 0 -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.EventStreamCodec = void 0; -const crc32_1 = __nccwpck_require__(47327); -const HeaderMarshaller_1 = __nccwpck_require__(74712); -const splitMessage_1 = __nccwpck_require__(20597); -class EventStreamCodec { - constructor(toUtf8, fromUtf8) { - this.headerMarshaller = new HeaderMarshaller_1.HeaderMarshaller(toUtf8, fromUtf8); - this.messageBuffer = []; - this.isEndOfStream = false; - } - feed(message) { - this.messageBuffer.push(this.decode(message)); - } - endOfStream() { - this.isEndOfStream = true; - } - getMessage() { - const message = this.messageBuffer.pop(); - const isEndOfStream = this.isEndOfStream; - return { - getMessage() { - return message; - }, - isEndOfStream() { - return isEndOfStream; - }, - }; - } - getAvailableMessages() { - const messages = this.messageBuffer; - this.messageBuffer = []; - const isEndOfStream = this.isEndOfStream; - return { - getMessages() { - return messages; - }, - isEndOfStream() { - return isEndOfStream; - }, - }; - } - encode({ headers: rawHeaders, body }) { - const headers = this.headerMarshaller.format(rawHeaders); - const length = headers.byteLength + body.byteLength + 16; - const out = new Uint8Array(length); - const view = new DataView(out.buffer, out.byteOffset, out.byteLength); - const checksum = new crc32_1.Crc32(); - view.setUint32(0, length, false); - view.setUint32(4, headers.byteLength, false); - view.setUint32(8, checksum.update(out.subarray(0, 8)).digest(), false); - out.set(headers, 12); - out.set(body, headers.byteLength + 12); - view.setUint32(length - 4, checksum.update(out.subarray(8, length - 4)).digest(), false); - return out; - } - decode(message) { - const { headers, body } = (0, splitMessage_1.splitMessage)(message); - return { headers: this.headerMarshaller.parse(headers), body }; - } - formatHeaders(rawHeaders) { - return this.headerMarshaller.format(rawHeaders); - } + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? 
validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr } -exports.EventStreamCodec = EventStreamCodec; +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} -/***/ }), +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} -/***/ 74712: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 -"use strict"; + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HeaderMarshaller = void 0; -const util_hex_encoding_1 = __nccwpck_require__(45364); -const Int64_1 = __nccwpck_require__(46086); -class HeaderMarshaller { - constructor(toUtf8, fromUtf8) { - this.toUtf8 = toUtf8; - this.fromUtf8 = fromUtf8; - } - format(headers) { - const chunks = []; - for (const headerName of Object.keys(headers)) { - const bytes = this.fromUtf8(headerName); - chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); - } - const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); - let position = 0; - for (const chunk of chunks) { - out.set(chunk, position); - position += chunk.byteLength; - } - return out; - } - formatHeaderValue(header) { - switch (header.type) { - case "boolean": - return Uint8Array.from([header.value ? 
0 : 1]); - case "byte": - return Uint8Array.from([2, header.value]); - case "short": - const shortView = new DataView(new ArrayBuffer(3)); - shortView.setUint8(0, 3); - shortView.setInt16(1, header.value, false); - return new Uint8Array(shortView.buffer); - case "integer": - const intView = new DataView(new ArrayBuffer(5)); - intView.setUint8(0, 4); - intView.setInt32(1, header.value, false); - return new Uint8Array(intView.buffer); - case "long": - const longBytes = new Uint8Array(9); - longBytes[0] = 5; - longBytes.set(header.value.bytes, 1); - return longBytes; - case "binary": - const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); - binView.setUint8(0, 6); - binView.setUint16(1, header.value.byteLength, false); - const binBytes = new Uint8Array(binView.buffer); - binBytes.set(header.value, 3); - return binBytes; - case "string": - const utf8Bytes = this.fromUtf8(header.value); - const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); - strView.setUint8(0, 7); - strView.setUint16(1, utf8Bytes.byteLength, false); - const strBytes = new Uint8Array(strView.buffer); - strBytes.set(utf8Bytes, 3); - return strBytes; - case "timestamp": - const tsBytes = new Uint8Array(9); - tsBytes[0] = 8; - tsBytes.set(Int64_1.Int64.fromNumber(header.value.valueOf()).bytes, 1); - return tsBytes; - case "uuid": - if (!UUID_PATTERN.test(header.value)) { - throw new Error(`Invalid UUID received: ${header.value}`); - } - const uuidBytes = new Uint8Array(17); - uuidBytes[0] = 9; - uuidBytes.set((0, util_hex_encoding_1.fromHex)(header.value.replace(/\-/g, "")), 1); - return uuidBytes; - } - } - parse(headers) { - const out = {}; - let position = 0; - while (position < headers.byteLength) { - const nameLength = headers.getUint8(position++); - const name = this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, nameLength)); - position += nameLength; - switch (headers.getUint8(position++)) { - case 0: - out[name] = { - type: BOOLEAN_TAG, - value: true, - }; - break; - case 1: - out[name] = { - type: BOOLEAN_TAG, - value: false, - }; - break; - case 2: - out[name] = { - type: BYTE_TAG, - value: headers.getInt8(position++), - }; - break; - case 3: - out[name] = { - type: SHORT_TAG, - value: headers.getInt16(position, false), - }; - position += 2; - break; - case 4: - out[name] = { - type: INT_TAG, - value: headers.getInt32(position, false), - }; - position += 4; - break; - case 5: - out[name] = { - type: LONG_TAG, - value: new Int64_1.Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)), - }; - position += 8; - break; - case 6: - const binaryLength = headers.getUint16(position, false); - position += 2; - out[name] = { - type: BINARY_TAG, - value: new Uint8Array(headers.buffer, headers.byteOffset + position, binaryLength), - }; - position += binaryLength; - break; - case 7: - const stringLength = headers.getUint16(position, false); - position += 2; - out[name] = { - type: STRING_TAG, - value: this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, stringLength)), - }; - position += stringLength; - break; - case 8: - out[name] = { - type: TIMESTAMP_TAG, - value: new Date(new Int64_1.Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)).valueOf()), - }; - position += 8; - break; - case 9: - const uuidBytes = new Uint8Array(headers.buffer, headers.byteOffset + position, 16); - position += 16; - out[name] = { - type: UUID_TAG, - value: `${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(0, 4))}-${(0, 
util_hex_encoding_1.toHex)(uuidBytes.subarray(4, 6))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(6, 8))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(8, 10))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(10))}`, - }; - break; - default: - throw new Error(`Unrecognized header type tag`); - } - } - return out; - } + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') } -exports.HeaderMarshaller = HeaderMarshaller; -var HEADER_VALUE_TYPE; -(function (HEADER_VALUE_TYPE) { - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolTrue"] = 0] = "boolTrue"; - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolFalse"] = 1] = "boolFalse"; - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byte"] = 2] = "byte"; - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["short"] = 3] = "short"; - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["integer"] = 4] = "integer"; - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["long"] = 5] = "long"; - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byteArray"] = 6] = "byteArray"; - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["string"] = 7] = "string"; - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["timestamp"] = 8] = "timestamp"; - HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["uuid"] = 9] = "uuid"; -})(HEADER_VALUE_TYPE || (HEADER_VALUE_TYPE = {})); -const BOOLEAN_TAG = "boolean"; -const BYTE_TAG = "byte"; -const SHORT_TAG = "short"; -const INT_TAG = "integer"; -const LONG_TAG = "long"; -const BINARY_TAG = "binary"; -const STRING_TAG = "string"; -const TIMESTAMP_TAG = "timestamp"; -const UUID_TAG = "uuid"; -const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; /***/ }), -/***/ 46086: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 83682: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var register = __nccwpck_require__(44670); +var addHook = __nccwpck_require__(5549); +var removeHook = __nccwpck_require__(6819); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Int64 = void 0; -const util_hex_encoding_1 = __nccwpck_require__(45364); -class Int64 { - constructor(bytes) { - this.bytes = bytes; - if (bytes.byteLength !== 8) { - throw new Error("Int64 buffers must be exactly 8 bytes"); - } - } - static fromNumber(number) { - if (number > 9223372036854776000 || number < -9223372036854776000) { - throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); - } - const bytes = new Uint8Array(8); - for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { - bytes[i] = remaining; - } - if (number < 0) { - negate(bytes); - } - return new Int64(bytes); - } - valueOf() { - const bytes = this.bytes.slice(0); - const negative = bytes[0] & 0b10000000; - if (negative) { - negate(bytes); - } - return parseInt((0, util_hex_encoding_1.toHex)(bytes), 16) * (negative ? 
-1 : 1); - } - toString() { - return String(this.valueOf()); - } -} -exports.Int64 = Int64; -function negate(bytes) { - for (let i = 0; i < 8; i++) { - bytes[i] ^= 0xff; - } - for (let i = 7; i > -1; i--) { - bytes[i]++; - if (bytes[i] !== 0) - break; - } +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind; +var bindable = bind.bind(bind); + +function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); } +function HookSingular() { + var singularHookName = "h"; + var singularHookState = { + registry: {}, + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; +} -/***/ }), +function HookCollection() { + var state = { + registry: {}, + }; -/***/ 73684: -/***/ ((__unused_webpack_module, exports) => { + var hook = register.bind(null, state); + bindApi(hook, state); -"use strict"; + return hook; +} -Object.defineProperty(exports, "__esModule", ({ value: true })); +var collectionHookDeprecationMessageDisplayed = false; +function Hook() { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; + } + return HookCollection(); +} +Hook.Singular = HookSingular.bind(); +Hook.Collection = HookCollection.bind(); -/***/ }), +module.exports = Hook; +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook; +module.exports.Singular = Hook.Singular; +module.exports.Collection = Hook.Collection; -/***/ 57255: -/***/ ((__unused_webpack_module, exports) => { -"use strict"; +/***/ }), -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MessageDecoderStream = void 0; -class MessageDecoderStream { - constructor(options) { - this.options = options; - } - [Symbol.asyncIterator]() { - return this.asyncIterator(); - } - async *asyncIterator() { - for await (const bytes of this.options.inputStream) { - const decoded = this.options.decoder.decode(bytes); - yield decoded; - } - } -} -exports.MessageDecoderStream = MessageDecoderStream; +/***/ 5549: +/***/ ((module) => { +module.exports = addHook; -/***/ }), +function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; + } -/***/ 52362: -/***/ ((__unused_webpack_module, exports) => { + if (kind === "before") { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)); + }; + } -"use strict"; + if (kind === "after") { + hook = function (method, options) { + var result; + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_; + return orig(result, options); + }) + .then(function () { + return result; + }); + }; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MessageEncoderStream = void 0; -class MessageEncoderStream { - constructor(options) { - this.options = options; - } - 
[Symbol.asyncIterator]() { - return this.asyncIterator(); - } - async *asyncIterator() { - for await (const msg of this.options.messageStream) { - const encoded = this.options.encoder.encode(msg); - yield encoded; - } - if (this.options.includeEndFrame) { - yield new Uint8Array(0); - } - } + if (kind === "error") { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options); + }); + }; + } + + state.registry[name].push({ + hook: hook, + orig: orig, + }); } -exports.MessageEncoderStream = MessageEncoderStream; /***/ }), -/***/ 62379: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SmithyMessageDecoderStream = void 0; -class SmithyMessageDecoderStream { - constructor(options) { - this.options = options; - } - [Symbol.asyncIterator]() { - return this.asyncIterator(); - } - async *asyncIterator() { - for await (const message of this.options.messageStream) { - const deserialized = await this.options.deserializer(message); - if (deserialized === undefined) - continue; - yield deserialized; - } - } -} -exports.SmithyMessageDecoderStream = SmithyMessageDecoderStream; +/***/ 44670: +/***/ ((module) => { +module.exports = register; -/***/ }), +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); + } -/***/ 12484: -/***/ ((__unused_webpack_module, exports) => { + if (!options) { + options = {}; + } -"use strict"; + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options); + }, method)(); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SmithyMessageEncoderStream = void 0; -class SmithyMessageEncoderStream { - constructor(options) { - this.options = options; - } - [Symbol.asyncIterator]() { - return this.asyncIterator(); - } - async *asyncIterator() { - for await (const chunk of this.options.inputStream) { - const payloadBuf = this.options.serializer(chunk); - yield payloadBuf; - } + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); } + + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); } -exports.SmithyMessageEncoderStream = SmithyMessageEncoderStream; /***/ }), -/***/ 56459: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(11014), exports); -tslib_1.__exportStar(__nccwpck_require__(74712), exports); -tslib_1.__exportStar(__nccwpck_require__(46086), exports); -tslib_1.__exportStar(__nccwpck_require__(73684), exports); -tslib_1.__exportStar(__nccwpck_require__(57255), exports); -tslib_1.__exportStar(__nccwpck_require__(52362), exports); -tslib_1.__exportStar(__nccwpck_require__(62379), exports); -tslib_1.__exportStar(__nccwpck_require__(12484), exports); +/***/ 6819: +/***/ ((module) => { +module.exports = removeHook; -/***/ }), +function removeHook(state, name, method) { + if (!state.registry[name]) { + return; + } -/***/ 20597: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var index = state.registry[name] + .map(function (registered) { + 
return registered.orig; + }) + .indexOf(method); -"use strict"; + if (index === -1) { + return; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.splitMessage = void 0; -const crc32_1 = __nccwpck_require__(47327); -const PRELUDE_MEMBER_LENGTH = 4; -const PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; -const CHECKSUM_LENGTH = 4; -const MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; -function splitMessage({ byteLength, byteOffset, buffer }) { - if (byteLength < MINIMUM_MESSAGE_LENGTH) { - throw new Error("Provided message too short to accommodate event stream message overhead"); - } - const view = new DataView(buffer, byteOffset, byteLength); - const messageLength = view.getUint32(0, false); - if (byteLength !== messageLength) { - throw new Error("Reported message length does not match received message length"); - } - const headerLength = view.getUint32(PRELUDE_MEMBER_LENGTH, false); - const expectedPreludeChecksum = view.getUint32(PRELUDE_LENGTH, false); - const expectedMessageChecksum = view.getUint32(byteLength - CHECKSUM_LENGTH, false); - const checksummer = new crc32_1.Crc32().update(new Uint8Array(buffer, byteOffset, PRELUDE_LENGTH)); - if (expectedPreludeChecksum !== checksummer.digest()) { - throw new Error(`The prelude checksum specified in the message (${expectedPreludeChecksum}) does not match the calculated CRC32 checksum (${checksummer.digest()})`); - } - checksummer.update(new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH, byteLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH))); - if (expectedMessageChecksum !== checksummer.digest()) { - throw new Error(`The message checksum (${checksummer.digest()}) did not match the expected value of ${expectedMessageChecksum}`); - } - return { - headers: new DataView(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH, headerLength), - body: new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH + headerLength, messageLength - headerLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH + CHECKSUM_LENGTH)), - }; + state.registry[name].splice(index, 1); } -exports.splitMessage = splitMessage; /***/ }), -/***/ 33193: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; +/***/ 87558: +/***/ (function(module) { -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveEventStreamSerdeConfig = void 0; -const resolveEventStreamSerdeConfig = (input) => ({ - ...input, - eventStreamMarshaller: input.eventStreamSerdeProvider(input), -}); -exports.resolveEventStreamSerdeConfig = resolveEventStreamSerdeConfig; +;(function (globalObject) { + 'use strict'; + +/* + * bignumber.js v9.1.2 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2022 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ + + + var BigNumber, + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, + + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, + + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 + + + /* + * Create and return a BigNumber constructor. + */ + function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), + + + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + + + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. + + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX + + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 + + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX + + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX + + // RANGE : [MIN_EXP, MAX_EXP] + + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX + + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX + + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false + + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 + + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + + // The format specification used by the BigNumber.prototype.toFormat method. + FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, + + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; + + + //------------------------------------------------------------------------------------------ + + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + */ + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; + + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); + + if (b == null) { + + if (v && v._isBigNumber === true) { + x.s = v.s; + + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } + + return; + } + + if ((isNum = typeof v == 'number') && v * 0 == 0) { + + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } + + return; + } + + str = String(v); + } else { + + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); + + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; + } + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. 
+ if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; + } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); + } + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; + } + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } + } + + return parseNumeric(x, String(v), isNum, b); + } + } + + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } + + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); + + if (str = str.slice(i, ++len)) { + len -= i; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } + + // Overflow? + if ((e = e - i - 1) > MAX_EXP) { + + // Infinity. + x.c = x.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + + // Transform base + + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 + + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); + + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } + + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } + + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. 
+ x.c = [x.e = 0]; + } + } + + + // CONSTRUCTOR PROPERTIES + + + BigNumber.clone = clone; + + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; + + + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. + */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; + + if (obj != null) { + + if (typeof obj == 'object') { + + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } + + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. 
+ // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; + + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. + if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } + + } else { + + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; + + + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; + + var i, n, + c = v.c, + e = v.e, + s = v.s; + + out: if ({}.toString.call(c) == '[object Array]') { + + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; + + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { + + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } + + // Last element cannot be zero, unless it is the only element. 
+ if (n !== 0) return true; + } + } + + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } + + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; + + + /* + * Return a new BigNumber whose value is the maximum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, -1); + }; + + + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, 1); + }; + + + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; + + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; + + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); + + k = mathceil(dp / LOG_BASE); + + if (CRYPTO) { + + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { + + a = crypto.getRandomValues(new Uint32Array(k *= 2)); + + for (; i < k;) { + + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); + + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { + + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; + + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { + + // buffer + a = crypto.randomBytes(k *= 7); + + for (; i < k;) { + + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { + + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } + + // Use Math.random. 
+ if (!CRYPTO) { + + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } + + k = c[--i]; + dp %= LOG_BASE; + + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } + + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + + // Zero? + if (i < 0) { + c = [e = 0]; + } else { + + // Remove leading elements which are zero and adjust exponent accordingly. + for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } + + rand.e = e; + rand.c = c; + return rand; + }; + })(); + + + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; + + + // PRIVATE FUNCTIONS + + + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; + + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; + + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + + arr[0] += alphabet.indexOf(str.charAt(i++)); + + for (j = 0; j < arr.length; j++) { + + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } + + return arr.reverse(); + } + + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; + + // Unlimited precision. + POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; + + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. + + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } + + // Convert the number as integer. + + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; + + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); + + // Zero? + if (!xc[0]) return alphabet.charAt(0); + + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; + + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } + + // xc now represents str converted to baseOut. + + // THe index of the rounding digit. 
+ d = e + dp + 1; + + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; + + // Look at the rounding digits and mode to determine whether to round up. + + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; + + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 8 : 7)); + + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { + + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { + + // Truncate xc to the required number of decimal places. + xc.length = d; + + // Round up? + if (r) { + + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; + + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } + + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); + + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } + + // The caller will add the sign. + return str; + }; + })(); + + + // Perform division in the specified base. Called by div and convertBase. + div = (function () { + + // Assume non-zero x and k. + function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; + + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } + + if (carry) x = [carry].concat(x); + + return x; + } + + function compare(a, b, aL, bL) { + var i, cmp; + + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { + + for (i = cmp = 0; i < aL; i++) { + + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } + + return cmp; + } + + function subtract(a, b, aL, base) { + var i = 0; + + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } + + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } + + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; + + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { + + return new BigNumber( + + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? s * 0 : s / 0 + ); + } + + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; + + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } + + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. 
+ for (i = 0; yc[i] == (xc[i] || 0); i++); + + if (yc[i] > (xc[i] || 0)) e--; + + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; + + // Normalise xc and yc so highest order digit of yc is >= base / 2. + + n = mathfloor(base / (yc[0] + 1)); + + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } + + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; + + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; + + do { + n = 0; + + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); + + // If divisor < remainder. + if (cmp < 0) { + + // Calculate trial digit, n. + + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); + + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. + + if (n > 1) { + + // n may be > base only when base is 3. + if (n >= base) n = base - 1; + + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; + + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; + + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { + + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { + + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } + + if (prodL < remL) prod = [0].concat(prod); + + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; + + // If product was < remainder. + if (cmp == -1) { + + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; + + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 + + // Add the next digit, n, to the result array. + qc[i++] = n; + + // Update the remainder. 
+ if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); + + more = rem[0] != null; + + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } + + if (base == BASE) { + + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } + + return q; + }; + })(); + + + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). + */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; + + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + if (!n.c) return n.toString(); + + c0 = n.c[0]; + ne = n.e; + + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); + + // n.e may have changed if the value was rounded up. + e = n.e; + + str = coeffToString(n.c); + len = str.length; + + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. + + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); + + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); + + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } + + return n.s < 0 && c0 ? '-' + str : str; + } + + + // Handle BigNumber.max and BigNumber.min. + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, + i = 1, + x = new BigNumber(args[0]); + + for (; i < args.length; i++) { + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; + } + } + + return x; + } + + + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; + + // Remove trailing zeros. + for (; !c[--j]; c.pop()); + + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); + + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { + + // Infinity. + n.c = n.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } + + return n; + } + + + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; + + return function (x, str, isNum, b) { + var base, + s = isNum ? str : str.replace(whitespaceOrPlus, ''); + + // No exception on ±Infinity or NaN. 
+ if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { + + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; + return !b || b == base ? p1 : m; + }); + + if (b) { + base = b; + + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } + + if (str != s) return new BigNumber(s, base); + } + + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } + + // NaN + x.s = null; + } + + x.c = x.e = null; + } + })(); + + + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; + + // if x is not Infinity or NaN... + if (xc) { + + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; + + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; + + // Get the rounding digit at index j of n. + rd = mathfloor(n / pows10[d - j - 1] % 10); + } else { + ni = mathceil((i + 1) / LOG_BASE); + + if (ni >= xc.length) { + + if (r) { + + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); + + // Get the index of rd within n. + i %= LOG_BASE; + + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; + + // Get the rounding digit at index j of n. + rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10); + } + } + + r = r || sd < 0 || + + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); + + if (sd < 1 || !xc[0]) { + xc.length = 0; + + if (r) { + + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { + + // Zero. + xc[0] = x.e = 0; + } + + return x; + } + + // Remove excess digits. + if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; + + // E.g. 
56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + + // Round up? + if (r) { + + for (; ;) { + + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { + + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } + + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } + + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } + + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; + + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } + + return x; + } + + + function valueOf(n) { + var str, + e = n.e; + + if (e === null) return n.toString(); + + str = coeffToString(n.c); + + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); + + return n.s < 0 ? '-' + str : str; + } + + + // PROTOTYPE/INSTANCE METHODS + + + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; + + + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; + + + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; + + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), dp + x.e + 1, rm); + } + + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; + + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; + + return n; + }; + + + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. 
+ */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; + + + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; + + + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + + n = new BigNumber(n); + + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } + + if (m != null) m = new BigNumber(m); + + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; + + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } + + nIsNeg = n.s < 0; + + if (m) { + + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); + + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + + if (isModExp) x = x.mod(m); + + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; + + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; + + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); + + } else if (POW_PRECISION) { + + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } + + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } + + y = new BigNumber(ONE); + + // Performs 54 loop iterations for n of 9007199254740991. 
+ for (; ;) { + + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; + + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } + + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); + + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + + x = x.times(x); + + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); + + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' + */ + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; + + + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. + */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; + + + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; + + + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; + + + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + + }; + + + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; + + + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; + + + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; + + + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; + + + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; + + + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; + + + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. 
+ */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; + + + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + + // Either zero? + if (!xc[0] || !yc[0]) { + + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); + } + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Determine which is the bigger number. + if (a = xe - ye) { + + if (xLTy = a < 0) { + a = -a; + t = xc; + } else { + ye = xe; + t = yc; + } + + t.reverse(); + + // Prepend zeros to equalise exponents. + for (b = a; b--; t.push(0)); + t.reverse(); + } else { + + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + + for (a = b = 0; b < j; b++) { + + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; + } + } + } + + // x < y? Point xc to the array of the bigger number. + if (xLTy) { + t = xc; + xc = yc; + yc = t; + y.s = -y.s; + } + + b = (j = yc.length) - (i = xc.length); + + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; + + // Subtract yc from xc. + for (; j > a;) { + + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } + + xc[j] -= yc[j]; + } + + // Remove leading zeros and adjust exponent accordingly. + for (; xc[0] == 0; xc.splice(0, 1), --ye); + + // Zero? + if (!xc[0]) { + + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } + + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; + + + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; + + y = new BigNumber(y, b); + + // Return NaN if x is Infinity or NaN, or y is NaN or zero. + if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); + + // Return x if y is Infinity or x is zero. 
+ } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } + + if (MODULO_MODE == 9) { + + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); + } + + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; + }; + + + /* + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I + * + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). + */ + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; + + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { + + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; + + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; + + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; + } + } + + return y; + } + + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; + + // Ensure xc points to longer array and xcL to its length. + if (xcL < ycL) { + zc = xc; + xc = yc; + yc = zc; + i = xcL; + xcL = ycL; + ycL = i; + } + + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; + } + + zc[j] = c; + } + + if (c) { + ++e; + } else { + zc.splice(0, 1); + } + + return normalise(y, zc, e); + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. + */ + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; + }; + + + /* + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I + * + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). + */ + P.plus = function (y, b) { + var t, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); + + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? 
x : a * 0); + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } + + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } + + a = xc.length; + b = yc.length; + + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) { + t = yc; + yc = xc; + xc = t; + b = a; + } + + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } + + if (a) { + xc = [a].concat(xc); + ++ye; + } + + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; + + + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; + + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), sd, rm); + } + + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; + + if (v = c[v]) { + + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); + + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } + + if (sd && x.e + 1 > n) n = x.e + 1; + + return n; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. + * + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' + */ + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); + }; + + + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); + + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } + + // Initial estimate. + s = Math.sqrt(+valueOf(x)); + + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); + + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; + } + + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); + } + + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; + + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); + + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); + + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { + + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); + + if (t.times(t).eq(x)) { + r = t; + break; + } + } + + dp += 4; + s += 4; + rep = 1; + } else { + + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { + + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } + + break; + } + } + } + } + + return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; + + + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. 
+ * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; + + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; + } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } + + str = x.toFixed(dp, rm); + + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; + + if (g2) { + i = g1; + g1 = g2; + g2 = i; + len -= i; + } + + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } + + return (format.prefix || '') + str + (format.suffix || ''); + }; + + + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; + + if (md != null) { + n = new BigNumber(md); + + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } + } + + if (!xc) return new BigNumber(x); + + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); + + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. + e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; + + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); + + // n0 = d1 = 0 + n0.c[0] = 0; + + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; + } + + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; + + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; + + MAX_EXP = exp; + + return r; + }; + + + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; + + + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; + + + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. + * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; + + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } + + if (s < 0 && n.c[0]) str = '-' + str; + } + + return str; + }; + + + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; + + + P._isBigNumber = true; + + if (configObject != null) BigNumber.set(configObject); + + return BigNumber; + } + + + // PRIVATE HELPER FUNCTIONS + + // These functions don't need access to variables, + // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + + + function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? i : i - 1; + } + + + // Return a coefficient array as a string of base 10 digits. 
+ function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; + + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } + + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); + + return r.slice(0, j + 1 || 1); + } + + + // Compare the value of BigNumbers x and y. + function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; + + // Either NaN? + if (!i || !j) return null; + + a = xc && !xc[0]; + b = yc && !yc[0]; + + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; + + // Signs differ? + if (i != j) return i; + + a = i < 0; + b = k == l; + + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; + + j = (k = xc.length) < (l = yc.length) ? k : l; + + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; + + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; + } + + + /* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ + function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } + } + + + // Assumes finite n. + function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; + } + + + function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; + } + + + function toFixedPoint(str, e, z) { + var len, zs; + + // Negative exponent? + if (e < 0) { + + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; + + // Positive exponent + } else { + len = str.length; + + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } + + return str; + } + + + // EXPORT + + + BigNumber = clone(); + BigNumber['default'] = BigNumber.BigNumber = BigNumber; + + // AMD. + if (typeof define == 'function' && define.amd) { + define(function () { return BigNumber; }); + + // Node.js and other environments that support module.exports. + } else if ( true && module.exports) { + module.exports = BigNumber; + + // Browser. + } else { + if (!globalObject) { + globalObject = typeof self != 'undefined' && self ? 
self : window; + } + + globalObject.BigNumber = BigNumber; + } +})(this); /***/ }), -/***/ 16181: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9239: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +/*jshint node:true */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(33193), exports); +var Buffer = (__nccwpck_require__(14300).Buffer); // browserify +var SlowBuffer = (__nccwpck_require__(14300).SlowBuffer); +module.exports = bufferEq; -/***/ }), +function bufferEq(a, b) { -/***/ 76865: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // shortcutting on type is necessary for correctness + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + return false; + } -"use strict"; + // buffer sizes should be well-known information, so despite this + // shortcutting, it doesn't leak any information about the *contents* of the + // buffers. + if (a.length !== b.length) { + return false; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.EventStreamMarshaller = void 0; -const eventstream_serde_universal_1 = __nccwpck_require__(66673); -const stream_1 = __nccwpck_require__(12781); -const utils_1 = __nccwpck_require__(58047); -class EventStreamMarshaller { - constructor({ utf8Encoder, utf8Decoder }) { - this.universalMarshaller = new eventstream_serde_universal_1.EventStreamMarshaller({ - utf8Decoder, - utf8Encoder, - }); - } - deserialize(body, deserializer) { - const bodyIterable = typeof body[Symbol.asyncIterator] === "function" ? body : (0, utils_1.readabletoIterable)(body); - return this.universalMarshaller.deserialize(bodyIterable, deserializer); - } - serialize(input, serializer) { - return stream_1.Readable.from(this.universalMarshaller.serialize(input, serializer)); - } + var c = 0; + for (var i = 0; i < a.length; i++) { + /*jshint bitwise:false */ + c |= a[i] ^ b[i]; // XOR + } + return c === 0; } -exports.EventStreamMarshaller = EventStreamMarshaller; - - -/***/ }), -/***/ 77682: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +bufferEq.install = function() { + Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { + return bufferEq(this, that); + }; +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(76865), exports); -tslib_1.__exportStar(__nccwpck_require__(56887), exports); +var origBufEqual = Buffer.prototype.equal; +var origSlowBufEqual = SlowBuffer.prototype.equal; +bufferEq.restore = function() { + Buffer.prototype.equal = origBufEqual; + SlowBuffer.prototype.equal = origSlowBufEqual; +}; /***/ }), -/***/ 56887: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.eventStreamSerdeProvider = void 0; -const EventStreamMarshaller_1 = __nccwpck_require__(76865); -const eventStreamSerdeProvider = (options) => new EventStreamMarshaller_1.EventStreamMarshaller(options); -exports.eventStreamSerdeProvider = eventStreamSerdeProvider; +/***/ 28222: +/***/ ((module, exports, __nccwpck_require__) => { +/* eslint-env browser */ -/***/ }), +/** + * This is the web browser implementation of `debug()`. 
+ */ -/***/ 58047: -/***/ ((__unused_webpack_module, exports) => { +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; -"use strict"; + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.readabletoIterable = void 0; -async function* readabletoIterable(readStream) { - let streamEnded = false; - let generationEnded = false; - const records = new Array(); - readStream.on("error", (err) => { - if (!streamEnded) { - streamEnded = true; - } - if (err) { - throw err; - } - }); - readStream.on("data", (data) => { - records.push(data); - }); - readStream.on("end", () => { - streamEnded = true; - }); - while (!generationEnded) { - const value = await new Promise((resolve) => setTimeout(() => resolve(records.shift()), 0)); - if (value) { - yield value; - } - generationEnded = streamEnded && records.length === 0; - } -} -exports.readabletoIterable = readabletoIterable; +/** + * Colors. + */ +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; -/***/ }), +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ -/***/ 84340: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } -"use strict"; + // Internet Explorer and Edge do not support colors. 
+ if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.EventStreamMarshaller = void 0; -const eventstream_codec_1 = __nccwpck_require__(56459); -const getChunkedStream_1 = __nccwpck_require__(2453); -const getUnmarshalledStream_1 = __nccwpck_require__(43597); -class EventStreamMarshaller { - constructor({ utf8Encoder, utf8Decoder }) { - this.eventStreamCodec = new eventstream_codec_1.EventStreamCodec(utf8Encoder, utf8Decoder); - this.utfEncoder = utf8Encoder; - } - deserialize(body, deserializer) { - const inputStream = (0, getChunkedStream_1.getChunkedStream)(body); - return new eventstream_codec_1.SmithyMessageDecoderStream({ - messageStream: new eventstream_codec_1.MessageDecoderStream({ inputStream, decoder: this.eventStreamCodec }), - deserializer: (0, getUnmarshalledStream_1.getMessageUnmarshaller)(deserializer, this.utfEncoder), - }); - } - serialize(inputStream, serializer) { - return new eventstream_codec_1.MessageEncoderStream({ - messageStream: new eventstream_codec_1.SmithyMessageEncoderStream({ inputStream, serializer }), - encoder: this.eventStreamCodec, - includeEndFrame: true, - }); - } + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? 
+ // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); } -exports.EventStreamMarshaller = EventStreamMarshaller; - -/***/ }), - -/***/ 2453: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getChunkedStream = void 0; -function getChunkedStream(source) { - let currentMessageTotalLength = 0; - let currentMessagePendingLength = 0; - let currentMessage = null; - let messageLengthBuffer = null; - const allocateMessage = (size) => { - if (typeof size !== "number") { - throw new Error("Attempted to allocate an event message where size was not a number: " + size); - } - currentMessageTotalLength = size; - currentMessagePendingLength = 4; - currentMessage = new Uint8Array(size); - const currentMessageView = new DataView(currentMessage.buffer); - currentMessageView.setUint32(0, size, false); - }; - const iterator = async function* () { - const sourceIterator = source[Symbol.asyncIterator](); - while (true) { - const { value, done } = await sourceIterator.next(); - if (done) { - if (!currentMessageTotalLength) { - return; - } - else if (currentMessageTotalLength === currentMessagePendingLength) { - yield currentMessage; - } - else { - throw new Error("Truncated event message received."); - } - return; - } - const chunkLength = value.length; - let currentOffset = 0; - while (currentOffset < chunkLength) { - if (!currentMessage) { - const bytesRemaining = chunkLength - currentOffset; - if (!messageLengthBuffer) { - messageLengthBuffer = new Uint8Array(4); - } - const numBytesForTotal = Math.min(4 - currentMessagePendingLength, bytesRemaining); - messageLengthBuffer.set(value.slice(currentOffset, currentOffset + numBytesForTotal), currentMessagePendingLength); - currentMessagePendingLength += numBytesForTotal; - currentOffset += numBytesForTotal; - if (currentMessagePendingLength < 4) { - break; - } - allocateMessage(new DataView(messageLengthBuffer.buffer).getUint32(0, false)); - messageLengthBuffer = null; - } - const numBytesToWrite = Math.min(currentMessageTotalLength - currentMessagePendingLength, chunkLength - currentOffset); - currentMessage.set(value.slice(currentOffset, currentOffset + numBytesToWrite), currentMessagePendingLength); - currentMessagePendingLength += numBytesToWrite; - currentOffset += numBytesToWrite; - if (currentMessageTotalLength && currentMessageTotalLength === currentMessagePendingLength) { - yield currentMessage; - currentMessage = null; - currentMessageTotalLength = 0; - currentMessagePendingLength = 0; - } - } - } - }; - return { - [Symbol.asyncIterator]: iterator, - }; -} -exports.getChunkedStream = getChunkedStream; +/** + * Colorize log arguments if enabled. + * + * @api public + */ +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); -/***/ }), + if (!this.useColors) { + return; + } -/***/ 43597: -/***/ ((__unused_webpack_module, exports) => { + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); -"use strict"; + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getMessageUnmarshaller = exports.getUnmarshalledStream = void 0; -function getUnmarshalledStream(source, options) { - const messageUnmarshaller = getMessageUnmarshaller(options.deserializer, options.toUtf8); - return { - [Symbol.asyncIterator]: async function* () { - for await (const chunk of source) { - const message = options.eventStreamCodec.decode(chunk); - const type = await messageUnmarshaller(message); - if (type === undefined) - continue; - yield type; - } - }, - }; -} -exports.getUnmarshalledStream = getUnmarshalledStream; -function getMessageUnmarshaller(deserializer, toUtf8) { - return async function (message) { - const { value: messageType } = message.headers[":message-type"]; - if (messageType === "error") { - const unmodeledError = new Error(message.headers[":error-message"].value || "UnknownError"); - unmodeledError.name = message.headers[":error-code"].value; - throw unmodeledError; - } - else if (messageType === "exception") { - const code = message.headers[":exception-type"].value; - const exception = { [code]: message }; - const deserializedException = await deserializer(exception); - if (deserializedException.$unknown) { - const error = new Error(toUtf8(message.body)); - error.name = code; - throw error; - } - throw deserializedException[code]; - } - else if (messageType === "event") { - const event = { - [message.headers[":event-type"].value]: message, - }; - const deserialized = await deserializer(event); - if (deserialized.$unknown) - return; - return deserialized; - } - else { - throw Error(`Unrecognizable event type: ${message.headers[":event-type"].value}`); - } - }; + args.splice(lastC, 0, c); } -exports.getMessageUnmarshaller = getMessageUnmarshaller; - - -/***/ }), - -/***/ 66673: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(84340), exports); -tslib_1.__exportStar(__nccwpck_require__(40721), exports); - - -/***/ }), -/***/ 40721: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.eventStreamSerdeProvider = void 0; -const EventStreamMarshaller_1 = __nccwpck_require__(84340); -const eventStreamSerdeProvider = (options) => new EventStreamMarshaller_1.EventStreamMarshaller(options); -exports.eventStreamSerdeProvider = eventStreamSerdeProvider; - - -/***/ }), - -/***/ 3081: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". 
+ * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Hash = void 0; -const util_buffer_from_1 = __nccwpck_require__(31381); -const util_utf8_1 = __nccwpck_require__(41895); -const buffer_1 = __nccwpck_require__(14300); -const crypto_1 = __nccwpck_require__(6113); -class Hash { - constructor(algorithmIdentifier, secret) { - this.algorithmIdentifier = algorithmIdentifier; - this.secret = secret; - this.reset(); - } - update(toHash, encoding) { - this.hash.update((0, util_utf8_1.toUint8Array)(castSourceData(toHash, encoding))); - } - digest() { - return Promise.resolve(this.hash.digest()); - } - reset() { - this.hash = this.secret - ? (0, crypto_1.createHmac)(this.algorithmIdentifier, castSourceData(this.secret)) - : (0, crypto_1.createHash)(this.algorithmIdentifier); - } -} -exports.Hash = Hash; -function castSourceData(toCast, encoding) { - if (buffer_1.Buffer.isBuffer(toCast)) { - return toCast; - } - if (typeof toCast === "string") { - return (0, util_buffer_from_1.fromString)(toCast, encoding); - } - if (ArrayBuffer.isView(toCast)) { - return (0, util_buffer_from_1.fromArrayBuffer)(toCast.buffer, toCast.byteOffset, toCast.byteLength); - } - return (0, util_buffer_from_1.fromArrayBuffer)(toCast); +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } } +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? 
+ } -/***/ }), - -/***/ 4671: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HashCalculator = void 0; -const util_utf8_1 = __nccwpck_require__(41895); -const stream_1 = __nccwpck_require__(12781); -class HashCalculator extends stream_1.Writable { - constructor(hash, options) { - super(options); - this.hash = hash; - } - _write(chunk, encoding, callback) { - try { - this.hash.update((0, util_utf8_1.toUint8Array)(chunk)); - } - catch (err) { - return callback(err); - } - callback(); - } + return r; } -exports.HashCalculator = HashCalculator; - - -/***/ }), - -/***/ 80075: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fileStreamHasher = void 0; -const fs_1 = __nccwpck_require__(57147); -const HashCalculator_1 = __nccwpck_require__(4671); -const fileStreamHasher = (hashCtor, fileStream) => new Promise((resolve, reject) => { - if (!isReadStream(fileStream)) { - reject(new Error("Unable to calculate hash for non-file streams.")); - return; - } - const fileStreamTee = (0, fs_1.createReadStream)(fileStream.path, { - start: fileStream.start, - end: fileStream.end, - }); - const hash = new hashCtor(); - const hashCalculator = new HashCalculator_1.HashCalculator(hash); - fileStreamTee.pipe(hashCalculator); - fileStreamTee.on("error", (err) => { - hashCalculator.end(); - reject(err); - }); - hashCalculator.on("error", reject); - hashCalculator.on("finish", function () { - hash.digest().then(resolve).catch(reject); - }); -}); -exports.fileStreamHasher = fileStreamHasher; -const isReadStream = (stream) => typeof stream.path === "string"; - - -/***/ }), - -/***/ 48866: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(80075), exports); -tslib_1.__exportStar(__nccwpck_require__(87715), exports); - - -/***/ }), - -/***/ 87715: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.readableStreamHasher = void 0; -const HashCalculator_1 = __nccwpck_require__(4671); -const readableStreamHasher = (hashCtor, readableStream) => { - if (readableStream.readableFlowing !== null) { - throw new Error("Unable to calculate hash for flowing readable stream"); - } - const hash = new hashCtor(); - const hashCalculator = new HashCalculator_1.HashCalculator(hash); - readableStream.pipe(hashCalculator); - return new Promise((resolve, reject) => { - readableStream.on("error", (err) => { - hashCalculator.end(); - reject(err); - }); - hashCalculator.on("error", reject); - hashCalculator.on("finish", () => { - hash.digest().then(resolve).catch(reject); - }); - }); -}; -exports.readableStreamHasher = readableStreamHasher; - - -/***/ }), - -/***/ 10780: -/***/ ((__unused_webpack_module, exports) => { -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isArrayBuffer = void 0; -const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || - 
Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; -exports.isArrayBuffer = isArrayBuffer; - - -/***/ }), - -/***/ 82800: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getContentLengthPlugin = exports.contentLengthMiddlewareOptions = exports.contentLengthMiddleware = void 0; -const protocol_http_1 = __nccwpck_require__(64418); -const CONTENT_LENGTH_HEADER = "content-length"; -function contentLengthMiddleware(bodyLengthChecker) { - return (next) => async (args) => { - const request = args.request; - if (protocol_http_1.HttpRequest.isInstance(request)) { - const { body, headers } = request; - if (body && - Object.keys(headers) - .map((str) => str.toLowerCase()) - .indexOf(CONTENT_LENGTH_HEADER) === -1) { - try { - const length = bodyLengthChecker(body); - request.headers = { - ...request.headers, - [CONTENT_LENGTH_HEADER]: String(length), - }; - } - catch (error) { - } - } - } - return next({ - ...args, - request, - }); - }; +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } } -exports.contentLengthMiddleware = contentLengthMiddleware; -exports.contentLengthMiddlewareOptions = { - step: "build", - tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], - name: "contentLengthMiddleware", - override: true, -}; -const getContentLengthPlugin = (options) => ({ - applyToStack: (clientStack) => { - clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), exports.contentLengthMiddlewareOptions); - }, -}); -exports.getContentLengthPlugin = getContentLengthPlugin; - - -/***/ }), - -/***/ 465: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createConfigValueProvider = void 0; -const createConfigValueProvider = (configKey, canonicalEndpointParamKey, config) => { - const configProvider = async () => { - var _a; - const configValue = (_a = config[configKey]) !== null && _a !== void 0 ? _a : config[canonicalEndpointParamKey]; - if (typeof configValue === "function") { - return configValue(); - } - return configValue; - }; - if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { - return async () => { - const endpoint = await configProvider(); - if (endpoint && typeof endpoint === "object") { - if ("url" in endpoint) { - return endpoint.url.href; - } - if ("hostname" in endpoint) { - const { protocol, hostname, port, path } = endpoint; - return `${protocol}//${hostname}${port ? 
":" + port : ""}${path}`; - } - } - return endpoint; - }; - } - return configProvider; -}; -exports.createConfigValueProvider = createConfigValueProvider; - - -/***/ }), - -/***/ 31518: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEndpointFromConfig = void 0; -const node_config_provider_1 = __nccwpck_require__(33461); -const getEndpointUrlConfig_1 = __nccwpck_require__(7574); -const getEndpointFromConfig = async (serviceId) => (0, node_config_provider_1.loadConfig)((0, getEndpointUrlConfig_1.getEndpointUrlConfig)(serviceId))(); -exports.getEndpointFromConfig = getEndpointFromConfig; - - -/***/ }), - -/***/ 73929: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveParams = exports.getEndpointFromInstructions = void 0; -const service_customizations_1 = __nccwpck_require__(13105); -const createConfigValueProvider_1 = __nccwpck_require__(465); -const getEndpointFromConfig_1 = __nccwpck_require__(31518); -const toEndpointV1_1 = __nccwpck_require__(38938); -const getEndpointFromInstructions = async (commandInput, instructionsSupplier, clientConfig, context) => { - if (!clientConfig.endpoint) { - const endpointFromConfig = await (0, getEndpointFromConfig_1.getEndpointFromConfig)(clientConfig.serviceId || ""); - if (endpointFromConfig) { - clientConfig.endpoint = () => Promise.resolve((0, toEndpointV1_1.toEndpointV1)(endpointFromConfig)); - } - } - const endpointParams = await (0, exports.resolveParams)(commandInput, instructionsSupplier, clientConfig); - if (typeof clientConfig.endpointProvider !== "function") { - throw new Error("config.endpointProvider is not set."); - } - const endpoint = clientConfig.endpointProvider(endpointParams, context); - return endpoint; -}; -exports.getEndpointFromInstructions = getEndpointFromInstructions; -const resolveParams = async (commandInput, instructionsSupplier, clientConfig) => { - var _a; - const endpointParams = {}; - const instructions = ((_a = instructionsSupplier === null || instructionsSupplier === void 0 ? void 0 : instructionsSupplier.getEndpointParameterInstructions) === null || _a === void 0 ? 
void 0 : _a.call(instructionsSupplier)) || {}; - for (const [name, instruction] of Object.entries(instructions)) { - switch (instruction.type) { - case "staticContextParams": - endpointParams[name] = instruction.value; - break; - case "contextParams": - endpointParams[name] = commandInput[instruction.name]; - break; - case "clientContextParams": - case "builtInParams": - endpointParams[name] = await (0, createConfigValueProvider_1.createConfigValueProvider)(instruction.name, name, clientConfig)(); - break; - default: - throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); - } - } - if (Object.keys(instructions).length === 0) { - Object.assign(endpointParams, clientConfig); - } - if (String(clientConfig.serviceId).toLowerCase() === "s3") { - await (0, service_customizations_1.resolveParamsForS3)(endpointParams); - } - return endpointParams; -}; -exports.resolveParams = resolveParams; - - -/***/ }), - -/***/ 7574: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEndpointUrlConfig = void 0; -const shared_ini_file_loader_1 = __nccwpck_require__(43507); -const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; -const CONFIG_ENDPOINT_URL = "endpoint_url"; -const getEndpointUrlConfig = (serviceId) => ({ - environmentVariableSelector: (env) => { - const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); - const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; - if (serviceEndpointUrl) - return serviceEndpointUrl; - const endpointUrl = env[ENV_ENDPOINT_URL]; - if (endpointUrl) - return endpointUrl; - return undefined; - }, - configFileSelector: (profile, config) => { - if (config && profile.services) { - const servicesSection = config[["services", profile.services].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; - if (servicesSection) { - const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); - const endpointUrl = servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; - if (endpointUrl) - return endpointUrl; - } - } - const endpointUrl = profile[CONFIG_ENDPOINT_URL]; - if (endpointUrl) - return endpointUrl; - return undefined; - }, - default: undefined, -}); -exports.getEndpointUrlConfig = getEndpointUrlConfig; - - -/***/ }), - -/***/ 50890: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(73929), exports); -tslib_1.__exportStar(__nccwpck_require__(38938), exports); - - -/***/ }), - -/***/ 38938: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toEndpointV1 = void 0; -const url_parser_1 = __nccwpck_require__(14681); -const toEndpointV1 = (endpoint) => { - if (typeof endpoint === "object") { - if ("url" in endpoint) { - return (0, url_parser_1.parseUrl)(endpoint.url); - } - return endpoint; - } - return (0, url_parser_1.parseUrl)(endpoint); -}; -exports.toEndpointV1 = toEndpointV1; - - -/***/ }), - -/***/ 55520: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.endpointMiddleware = void 0; -const 
util_middleware_1 = __nccwpck_require__(2390); -const getEndpointFromInstructions_1 = __nccwpck_require__(73929); -const endpointMiddleware = ({ config, instructions, }) => { - return (next, context) => async (args) => { - var _a, _b, _c; - const endpoint = await (0, getEndpointFromInstructions_1.getEndpointFromInstructions)(args.input, { - getEndpointParameterInstructions() { - return instructions; - }, - }, { ...config }, context); - context.endpointV2 = endpoint; - context.authSchemes = (_a = endpoint.properties) === null || _a === void 0 ? void 0 : _a.authSchemes; - const authScheme = (_b = context.authSchemes) === null || _b === void 0 ? void 0 : _b[0]; - if (authScheme) { - context["signing_region"] = authScheme.signingRegion; - context["signing_service"] = authScheme.signingName; - const smithyContext = (0, util_middleware_1.getSmithyContext)(context); - const httpAuthOption = (_c = smithyContext === null || smithyContext === void 0 ? void 0 : smithyContext.selectedHttpAuthScheme) === null || _c === void 0 ? void 0 : _c.httpAuthOption; - if (httpAuthOption) { - httpAuthOption.signingProperties = Object.assign(httpAuthOption.signingProperties || {}, { - signing_region: authScheme.signingRegion, - signingRegion: authScheme.signingRegion, - signing_service: authScheme.signingName, - signingName: authScheme.signingName, - signingRegionSet: authScheme.signingRegionSet, - }, authScheme.properties); - } - } - return next({ - ...args, - }); - }; -}; -exports.endpointMiddleware = endpointMiddleware; - - -/***/ }), - -/***/ 71329: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEndpointPlugin = exports.endpointMiddlewareOptions = void 0; -const middleware_serde_1 = __nccwpck_require__(81238); -const endpointMiddleware_1 = __nccwpck_require__(55520); -exports.endpointMiddlewareOptions = { - step: "serialize", - tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], - name: "endpointV2Middleware", - override: true, - relation: "before", - toMiddleware: middleware_serde_1.serializerMiddlewareOption.name, -}; -const getEndpointPlugin = (config, instructions) => ({ - applyToStack: (clientStack) => { - clientStack.addRelativeTo((0, endpointMiddleware_1.endpointMiddleware)({ - config, - instructions, - }), exports.endpointMiddlewareOptions); - }, -}); -exports.getEndpointPlugin = getEndpointPlugin; - - -/***/ }), - -/***/ 82918: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(50890), exports); -tslib_1.__exportStar(__nccwpck_require__(55520), exports); -tslib_1.__exportStar(__nccwpck_require__(71329), exports); -tslib_1.__exportStar(__nccwpck_require__(74139), exports); -tslib_1.__exportStar(__nccwpck_require__(39720), exports); - - -/***/ }), - -/***/ 74139: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveEndpointConfig = void 0; -const util_middleware_1 = __nccwpck_require__(2390); -const toEndpointV1_1 = __nccwpck_require__(38938); -const resolveEndpointConfig = (input) => { - var _a, _b, _c; - const tls = (_a = input.tls) !== null && _a !== void 0 ? _a : true; - const { endpoint } = input; - const customEndpointProvider = endpoint != null ? 
async () => (0, toEndpointV1_1.toEndpointV1)(await (0, util_middleware_1.normalizeProvider)(endpoint)()) : undefined; - const isCustomEndpoint = !!endpoint; - return { - ...input, - endpoint: customEndpointProvider, - tls, - isCustomEndpoint, - useDualstackEndpoint: (0, util_middleware_1.normalizeProvider)((_b = input.useDualstackEndpoint) !== null && _b !== void 0 ? _b : false), - useFipsEndpoint: (0, util_middleware_1.normalizeProvider)((_c = input.useFipsEndpoint) !== null && _c !== void 0 ? _c : false), - }; + +module.exports = __nccwpck_require__(46243)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } }; -exports.resolveEndpointConfig = resolveEndpointConfig; /***/ }), -/***/ 13105: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 46243: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(19194), exports); +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(80900); + createDebug.destroy = destroy; -/***/ }), + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); -/***/ 19194: -/***/ ((__unused_webpack_module, exports) => { + /** + * The currently active debug mode names, and names to skip. + */ -"use strict"; + createDebug.names = []; + createDebug.skips = []; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isArnBucketName = exports.isDnsCompatibleBucketName = exports.S3_HOSTNAME_PATTERN = exports.DOT_PATTERN = exports.resolveParamsForS3 = void 0; -const resolveParamsForS3 = async (endpointParams) => { - const bucket = (endpointParams === null || endpointParams === void 0 ? 
void 0 : endpointParams.Bucket) || ""; - if (typeof endpointParams.Bucket === "string") { - endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); - } - if ((0, exports.isArnBucketName)(bucket)) { - if (endpointParams.ForcePathStyle === true) { - throw new Error("Path-style addressing cannot be used with ARN buckets"); - } - } - else if (!(0, exports.isDnsCompatibleBucketName)(bucket) || - (bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:")) || - bucket.toLowerCase() !== bucket || - bucket.length < 3) { - endpointParams.ForcePathStyle = true; - } - if (endpointParams.DisableMultiRegionAccessPoints) { - endpointParams.disableMultiRegionAccessPoints = true; - endpointParams.DisableMRAP = true; - } - return endpointParams; -}; -exports.resolveParamsForS3 = resolveParamsForS3; -const DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; -const IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; -const DOTS_PATTERN = /\.\./; -exports.DOT_PATTERN = /\./; -exports.S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; -const isDnsCompatibleBucketName = (bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName); -exports.isDnsCompatibleBucketName = isDnsCompatibleBucketName; -const isArnBucketName = (bucketName) => { - const [arn, partition, service, region, account, typeOrId] = bucketName.split(":"); - const isArn = arn === "arn" && bucketName.split(":").length >= 6; - const isValidArn = [arn, partition, service, account, typeOrId].filter(Boolean).length === 5; - if (isArn && !isValidArn) { - throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); - } - return arn === "arn" && !!partition && !!service && !!account && !!typeOrId; -}; -exports.isArnBucketName = isArnBucketName; + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; -/***/ }), + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } -/***/ 39720: -/***/ ((__unused_webpack_module, exports) => { + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; -"use strict"; + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; -Object.defineProperty(exports, "__esModule", ({ value: true })); + function debug(...args) { + // Disabled? 
+ if (!debug.enabled) { + return; + } + const self = debug; -/***/ }), + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; -/***/ 80155: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + args[0] = createDebug.coerce(args[0]); -"use strict"; + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AdaptiveRetryStrategy = void 0; -const util_retry_1 = __nccwpck_require__(84902); -const StandardRetryStrategy_1 = __nccwpck_require__(94582); -class AdaptiveRetryStrategy extends StandardRetryStrategy_1.StandardRetryStrategy { - constructor(maxAttemptsProvider, options) { - const { rateLimiter, ...superOptions } = options !== null && options !== void 0 ? options : {}; - super(maxAttemptsProvider, superOptions); - this.rateLimiter = rateLimiter !== null && rateLimiter !== void 0 ? rateLimiter : new util_retry_1.DefaultRateLimiter(); - this.mode = util_retry_1.RETRY_MODES.ADAPTIVE; - } - async retry(next, args) { - return super.retry(next, args, { - beforeRequest: async () => { - return this.rateLimiter.getSendToken(); - }, - afterRequest: (response) => { - this.rateLimiter.updateClientSendingRate(response); - }, - }); - } -} -exports.AdaptiveRetryStrategy = AdaptiveRetryStrategy; + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); -/***/ }), + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); -/***/ 94582: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } -"use strict"; + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.StandardRetryStrategy = void 0; -const protocol_http_1 = __nccwpck_require__(64418); -const service_error_classification_1 = __nccwpck_require__(6375); -const util_retry_1 = __nccwpck_require__(84902); -const uuid_1 = __nccwpck_require__(7761); -const defaultRetryQuota_1 = __nccwpck_require__(29991); -const delayDecider_1 = __nccwpck_require__(27233); -const retryDecider_1 = __nccwpck_require__(67653); -const util_1 = __nccwpck_require__(42827); -class StandardRetryStrategy { - constructor(maxAttemptsProvider, options) { - var _a, _b, _c; - this.maxAttemptsProvider = maxAttemptsProvider; - this.mode = util_retry_1.RETRY_MODES.STANDARD; - this.retryDecider = (_a = options === null || options === void 0 ? void 0 : options.retryDecider) !== null && _a !== void 0 ? _a : retryDecider_1.defaultRetryDecider; - this.delayDecider = (_b = options === null || options === void 0 ? 
void 0 : options.delayDecider) !== null && _b !== void 0 ? _b : delayDecider_1.defaultDelayDecider; - this.retryQuota = (_c = options === null || options === void 0 ? void 0 : options.retryQuota) !== null && _c !== void 0 ? _c : (0, defaultRetryQuota_1.getDefaultRetryQuota)(util_retry_1.INITIAL_RETRY_TOKENS); - } - shouldRetry(error, attempts, maxAttempts) { - return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); - } - async getMaxAttempts() { - let maxAttempts; - try { - maxAttempts = await this.maxAttemptsProvider(); - } - catch (error) { - maxAttempts = util_retry_1.DEFAULT_MAX_ATTEMPTS; - } - return maxAttempts; - } - async retry(next, args, options) { - let retryTokenAmount; - let attempts = 0; - let totalDelay = 0; - const maxAttempts = await this.getMaxAttempts(); - const { request } = args; - if (protocol_http_1.HttpRequest.isInstance(request)) { - request.headers[util_retry_1.INVOCATION_ID_HEADER] = (0, uuid_1.v4)(); - } - while (true) { - try { - if (protocol_http_1.HttpRequest.isInstance(request)) { - request.headers[util_retry_1.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; - } - if (options === null || options === void 0 ? void 0 : options.beforeRequest) { - await options.beforeRequest(); - } - const { response, output } = await next(args); - if (options === null || options === void 0 ? void 0 : options.afterRequest) { - options.afterRequest(response); - } - this.retryQuota.releaseRetryTokens(retryTokenAmount); - output.$metadata.attempts = attempts + 1; - output.$metadata.totalRetryDelay = totalDelay; - return { response, output }; - } - catch (e) { - const err = (0, util_1.asSdkError)(e); - attempts++; - if (this.shouldRetry(err, attempts, maxAttempts)) { - retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); - const delayFromDecider = this.delayDecider((0, service_error_classification_1.isThrottlingError)(err) ? 
util_retry_1.THROTTLING_RETRY_DELAY_BASE : util_retry_1.DEFAULT_RETRY_DELAY_BASE, attempts); - const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); - const delay = Math.max(delayFromResponse || 0, delayFromDecider); - totalDelay += delay; - await new Promise((resolve) => setTimeout(resolve, delay)); - continue; - } - if (!err.$metadata) { - err.$metadata = {}; - } - err.$metadata.attempts = attempts; - err.$metadata.totalRetryDelay = totalDelay; - throw err; - } - } - } -} -exports.StandardRetryStrategy = StandardRetryStrategy; -const getDelayFromRetryAfterHeader = (response) => { - if (!protocol_http_1.HttpResponse.isInstance(response)) - return; - const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); - if (!retryAfterHeaderName) - return; - const retryAfter = response.headers[retryAfterHeaderName]; - const retryAfterSeconds = Number(retryAfter); - if (!Number.isNaN(retryAfterSeconds)) - return retryAfterSeconds * 1000; - const retryAfterDate = new Date(retryAfter); - return retryAfterDate.getTime() - Date.now(); -}; + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); -/***/ }), + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } -/***/ 58709: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return debug; + } -"use strict"; + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NODE_RETRY_MODE_CONFIG_OPTIONS = exports.CONFIG_RETRY_MODE = exports.ENV_RETRY_MODE = exports.resolveRetryConfig = exports.NODE_MAX_ATTEMPT_CONFIG_OPTIONS = exports.CONFIG_MAX_ATTEMPTS = exports.ENV_MAX_ATTEMPTS = void 0; -const util_middleware_1 = __nccwpck_require__(2390); -const util_retry_1 = __nccwpck_require__(84902); -exports.ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; -exports.CONFIG_MAX_ATTEMPTS = "max_attempts"; -exports.NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { - environmentVariableSelector: (env) => { - const value = env[exports.ENV_MAX_ATTEMPTS]; - if (!value) - return undefined; - const maxAttempt = parseInt(value); - if (Number.isNaN(maxAttempt)) { - throw new Error(`Environment variable ${exports.ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); - } - return maxAttempt; - }, - configFileSelector: (profile) => { - const value = profile[exports.CONFIG_MAX_ATTEMPTS]; - if (!value) - return undefined; - const maxAttempt = parseInt(value); - if (Number.isNaN(maxAttempt)) { - throw new Error(`Shared config file entry ${exports.CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); - } - return maxAttempt; - }, - default: util_retry_1.DEFAULT_MAX_ATTEMPTS, -}; -const resolveRetryConfig = (input) => { - var _a; - const { retryStrategy } = input; - const maxAttempts = (0, util_middleware_1.normalizeProvider)((_a = input.maxAttempts) !== null && _a !== void 0 ? 
_a : util_retry_1.DEFAULT_MAX_ATTEMPTS); - return { - ...input, - maxAttempts, - retryStrategy: async () => { - if (retryStrategy) { - return retryStrategy; - } - const retryMode = await (0, util_middleware_1.normalizeProvider)(input.retryMode)(); - if (retryMode === util_retry_1.RETRY_MODES.ADAPTIVE) { - return new util_retry_1.AdaptiveRetryStrategy(maxAttempts); - } - return new util_retry_1.StandardRetryStrategy(maxAttempts); - }, - }; -}; -exports.resolveRetryConfig = resolveRetryConfig; -exports.ENV_RETRY_MODE = "AWS_RETRY_MODE"; -exports.CONFIG_RETRY_MODE = "retry_mode"; -exports.NODE_RETRY_MODE_CONFIG_OPTIONS = { - environmentVariableSelector: (env) => env[exports.ENV_RETRY_MODE], - configFileSelector: (profile) => profile[exports.CONFIG_RETRY_MODE], - default: util_retry_1.DEFAULT_RETRY_MODE, -}; + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + createDebug.names = []; + createDebug.skips = []; -/***/ }), + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; -/***/ 29991: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } -"use strict"; + namespaces = split[i].replace(/\*/g, '.*?'); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getDefaultRetryQuota = void 0; -const util_retry_1 = __nccwpck_require__(84902); -const getDefaultRetryQuota = (initialRetryTokens, options) => { - var _a, _b, _c; - const MAX_CAPACITY = initialRetryTokens; - const noRetryIncrement = (_a = options === null || options === void 0 ? void 0 : options.noRetryIncrement) !== null && _a !== void 0 ? _a : util_retry_1.NO_RETRY_INCREMENT; - const retryCost = (_b = options === null || options === void 0 ? void 0 : options.retryCost) !== null && _b !== void 0 ? _b : util_retry_1.RETRY_COST; - const timeoutRetryCost = (_c = options === null || options === void 0 ? void 0 : options.timeoutRetryCost) !== null && _c !== void 0 ? _c : util_retry_1.TIMEOUT_RETRY_COST; - let availableCapacity = initialRetryTokens; - const getCapacityAmount = (error) => (error.name === "TimeoutError" ? timeoutRetryCost : retryCost); - const hasRetryTokens = (error) => getCapacityAmount(error) <= availableCapacity; - const retrieveRetryTokens = (error) => { - if (!hasRetryTokens(error)) { - throw new Error("No retry token available"); - } - const capacityAmount = getCapacityAmount(error); - availableCapacity -= capacityAmount; - return capacityAmount; - }; - const releaseRetryTokens = (capacityReleaseAmount) => { - availableCapacity += capacityReleaseAmount !== null && capacityReleaseAmount !== void 0 ? capacityReleaseAmount : noRetryIncrement; - availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); - }; - return Object.freeze({ - hasRetryTokens, - retrieveRetryTokens, - releaseRetryTokens, - }); -}; -exports.getDefaultRetryQuota = getDefaultRetryQuota; + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + /** + * Disable debug output. 
+ * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } -/***/ }), + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } -/***/ 27233: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + let i; + let len; -"use strict"; + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.defaultDelayDecider = void 0; -const util_retry_1 = __nccwpck_require__(84902); -const defaultDelayDecider = (delayBase, attempts) => Math.floor(Math.min(util_retry_1.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); -exports.defaultDelayDecider = defaultDelayDecider; + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + return false; + } -/***/ }), + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } -/***/ 96039: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } -"use strict"; + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(80155), exports); -tslib_1.__exportStar(__nccwpck_require__(94582), exports); -tslib_1.__exportStar(__nccwpck_require__(58709), exports); -tslib_1.__exportStar(__nccwpck_require__(27233), exports); -tslib_1.__exportStar(__nccwpck_require__(76556), exports); -tslib_1.__exportStar(__nccwpck_require__(67653), exports); -tslib_1.__exportStar(__nccwpck_require__(81434), exports); + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; /***/ }), -/***/ 18977: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 38237: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isStreamingPayload = void 0; -const stream_1 = __nccwpck_require__(12781); -const isStreamingPayload = (request) => (request === null || request === void 0 ? void 0 : request.body) instanceof stream_1.Readable || - (typeof ReadableStream !== "undefined" && (request === null || request === void 0 ? 
void 0 : request.body) instanceof ReadableStream); -exports.isStreamingPayload = isStreamingPayload; +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(28222); +} else { + module.exports = __nccwpck_require__(35332); +} /***/ }), -/***/ 76556: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 35332: +/***/ ((module, exports, __nccwpck_require__) => { -"use strict"; +/** + * Module dependencies. + */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOmitRetryHeadersPlugin = exports.omitRetryHeadersMiddlewareOptions = exports.omitRetryHeadersMiddleware = void 0; -const protocol_http_1 = __nccwpck_require__(64418); -const util_retry_1 = __nccwpck_require__(84902); -const omitRetryHeadersMiddleware = () => (next) => async (args) => { - const { request } = args; - if (protocol_http_1.HttpRequest.isInstance(request)) { - delete request.headers[util_retry_1.INVOCATION_ID_HEADER]; - delete request.headers[util_retry_1.REQUEST_HEADER]; - } - return next(args); -}; -exports.omitRetryHeadersMiddleware = omitRetryHeadersMiddleware; -exports.omitRetryHeadersMiddlewareOptions = { - name: "omitRetryHeadersMiddleware", - tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], - relation: "before", - toMiddleware: "awsAuthMiddleware", - override: true, -}; -const getOmitRetryHeadersPlugin = (options) => ({ - applyToStack: (clientStack) => { - clientStack.addRelativeTo((0, exports.omitRetryHeadersMiddleware)(), exports.omitRetryHeadersMiddlewareOptions); - }, -}); -exports.getOmitRetryHeadersPlugin = getOmitRetryHeadersPlugin; +const tty = __nccwpck_require__(76224); +const util = __nccwpck_require__(73837); +/** + * This is the Node.js implementation of `debug()`. + */ -/***/ }), +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); -/***/ 67653: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/** + * Colors. 
+ */ -"use strict"; +exports.colors = [6, 2, 3, 4, 5, 1]; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.defaultRetryDecider = void 0; -const service_error_classification_1 = __nccwpck_require__(6375); -const defaultRetryDecider = (error) => { - if (!error) { - return false; - } - return (0, service_error_classification_1.isRetryableByTrait)(error) || (0, service_error_classification_1.isClockSkewError)(error) || (0, service_error_classification_1.isThrottlingError)(error) || (0, service_error_classification_1.isTransientError)(error); -}; -exports.defaultRetryDecider = defaultRetryDecider; +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(59318); + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} -/***/ }), +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ -/***/ 81434: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); -"use strict"; + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getRetryAfterHint = exports.getRetryPlugin = exports.retryMiddlewareOptions = exports.retryMiddleware = void 0; -const protocol_http_1 = __nccwpck_require__(64418); -const service_error_classification_1 = __nccwpck_require__(6375); -const smithy_client_1 = __nccwpck_require__(63570); -const util_retry_1 = __nccwpck_require__(84902); -const uuid_1 = __nccwpck_require__(7761); -const isStreamingPayload_1 = __nccwpck_require__(18977); -const util_1 = __nccwpck_require__(42827); -const retryMiddleware = (options) => (next, context) => async (args) => { - var _a; - let retryStrategy = await options.retryStrategy(); - const maxAttempts = await options.maxAttempts(); - if (isRetryStrategyV2(retryStrategy)) { - retryStrategy = retryStrategy; - let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); - let lastError = new Error(); - let attempts = 0; - let totalRetryDelay = 0; - const { request } = args; - const isRequest = protocol_http_1.HttpRequest.isInstance(request); - if (isRequest) { - request.headers[util_retry_1.INVOCATION_ID_HEADER] = (0, 
uuid_1.v4)(); - } - while (true) { - try { - if (isRequest) { - request.headers[util_retry_1.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; - } - const { response, output } = await next(args); - retryStrategy.recordSuccess(retryToken); - output.$metadata.attempts = attempts + 1; - output.$metadata.totalRetryDelay = totalRetryDelay; - return { response, output }; - } - catch (e) { - const retryErrorInfo = getRetryErrorInfo(e); - lastError = (0, util_1.asSdkError)(e); - if (isRequest && (0, isStreamingPayload_1.isStreamingPayload)(request)) { - (_a = (context.logger instanceof smithy_client_1.NoOpLogger ? console : context.logger)) === null || _a === void 0 ? void 0 : _a.warn("An error was encountered in a non-retryable streaming request."); - throw lastError; - } - try { - retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); - } - catch (refreshError) { - if (!lastError.$metadata) { - lastError.$metadata = {}; - } - lastError.$metadata.attempts = attempts + 1; - lastError.$metadata.totalRetryDelay = totalRetryDelay; - throw lastError; - } - attempts = retryToken.getRetryCount(); - const delay = retryToken.getRetryDelay(); - totalRetryDelay += delay; - await new Promise((resolve) => setTimeout(resolve, delay)); - } - } - } - else { - retryStrategy = retryStrategy; - if (retryStrategy === null || retryStrategy === void 0 ? void 0 : retryStrategy.mode) - context.userAgent = [...(context.userAgent || []), ["cfg/retry-mode", retryStrategy.mode]]; - return retryStrategy.retry(next, args); - } -}; -exports.retryMiddleware = retryMiddleware; -const isRetryStrategyV2 = (retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && - typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && - typeof retryStrategy.recordSuccess !== "undefined"; -const getRetryErrorInfo = (error) => { - const errorInfo = { - errorType: getRetryErrorType(error), - }; - const retryAfterHint = (0, exports.getRetryAfterHint)(error.$response); - if (retryAfterHint) { - errorInfo.retryAfterHint = retryAfterHint; - } - return errorInfo; -}; -const getRetryErrorType = (error) => { - if ((0, service_error_classification_1.isThrottlingError)(error)) - return "THROTTLING"; - if ((0, service_error_classification_1.isTransientError)(error)) - return "TRANSIENT"; - if ((0, service_error_classification_1.isServerError)(error)) - return "SERVER_ERROR"; - return "CLIENT_ERROR"; -}; -exports.retryMiddlewareOptions = { - name: "retryMiddleware", - tags: ["RETRY"], - step: "finalizeRequest", - priority: "high", - override: true, -}; -const getRetryPlugin = (options) => ({ - applyToStack: (clientStack) => { - clientStack.add((0, exports.retryMiddleware)(options), exports.retryMiddlewareOptions); - }, -}); -exports.getRetryPlugin = getRetryPlugin; -const getRetryAfterHint = (response) => { - if (!protocol_http_1.HttpResponse.isInstance(response)) - return; - const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); - if (!retryAfterHeaderName) - return; - const retryAfter = response.headers[retryAfterHeaderName]; - const retryAfterSeconds = Number(retryAfter); - if (!Number.isNaN(retryAfterSeconds)) - return new Date(retryAfterSeconds * 1000); - const retryAfterDate = new Date(retryAfter); - return retryAfterDate; -}; -exports.getRetryAfterHint = getRetryAfterHint; + obj[prop] = val; + return obj; +}, {}); +/** + * Is stdout a TTY? Colored output is enabled when `true`. 
+ */ -/***/ }), +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} -/***/ 42827: -/***/ ((__unused_webpack_module, exports) => { +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ -"use strict"; +function formatArgs(args) { + const {namespace: name, useColors} = this; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.asSdkError = void 0; -const asSdkError = (error) => { - if (error instanceof Error) - return error; - if (error instanceof Object) - return Object.assign(new Error(), error); - if (typeof error === "string") - return new Error(error); - return new Error(`AWS SDK error wrapper for ${error}`); -}; -exports.asSdkError = asSdkError; + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} -/***/ }), +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} -/***/ 7761: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ -"use strict"; +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ -var _v = _interopRequireDefault(__nccwpck_require__(36310)); +function load() { + return process.env.DEBUG; +} -var _v2 = _interopRequireDefault(__nccwpck_require__(9465)); +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. 
+ */ -var _v3 = _interopRequireDefault(__nccwpck_require__(86001)); +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} -var _v4 = _interopRequireDefault(__nccwpck_require__(38310)); +module.exports = __nccwpck_require__(46243)(exports); -var _nil = _interopRequireDefault(__nccwpck_require__(3436)); +const {formatters} = module.exports; -var _version = _interopRequireDefault(__nccwpck_require__(17780)); +/** + * Map %o to `util.inspect()`, all on a single line. + */ -var _validate = _interopRequireDefault(__nccwpck_require__(66992)); +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; -var _stringify = _interopRequireDefault(__nccwpck_require__(79618)); +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ -var _parse = _interopRequireDefault(__nccwpck_require__(40086)); +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /***/ }), -/***/ 11380: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 961: +/***/ ((module) => { "use strict"; +module.exports = (object, propertyName, fn) => { + const define = value => Object.defineProperty(object, propertyName, {value, enumerable: true, writable: true}); -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } + Object.defineProperty(object, propertyName, { + configurable: true, + enumerable: true, + get() { + const result = fn(); + define(result); + return result; + }, + set(value) { + define(value); + } + }); - return _crypto.default.createHash('md5').update(bytes).digest(); -} + return object; +}; -var _default = md5; -exports["default"] = _default; /***/ }), -/***/ 3436: +/***/ 58932: /***/ ((__unused_webpack_module, exports) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; +Object.defineProperty(exports, "__esModule", ({ value: true })); -/***/ }), +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) -/***/ 40086: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + /* istanbul ignore next */ -"use strict"; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = 'Deprecation'; + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +} -var _validate = _interopRequireDefault(__nccwpck_require__(66992)); +exports.Deprecation = Deprecation; -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } +/***/ }), - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ +/***/ 76599: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ +var stream = __nccwpck_require__(51642) +var eos = __nccwpck_require__(81205) +var inherits = __nccwpck_require__(44124) +var shift = __nccwpck_require__(66121) - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? null : err) + else if (end && !self._ended) self.end() + } } -var _default = parse; -exports["default"] = _default; +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} -/***/ }), +var noop = function() {} -/***/ 3194: -/***/ ((__unused_webpack_module, exports) => { +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} -"use strict"; +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + this._writable = null + this._readable = null + this._readable2 = null -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false -/***/ }), + this.destroyed = false -/***/ 68136: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} -"use strict"; +inherits(Duplexify, stream.Duplex) +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, 
readable, opts) +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } -let poolPtr = rnds8Pool.length; + if (writable === null || writable === false) { + this.end() + return + } -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) - poolPtr = 0; + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() } - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } -/***/ }), + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks -/***/ 46679: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear -"use strict"; + this.uncork() // always uncork setWritable +} +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); + var onreadable = function() { + self._forward() } - return _crypto.default.createHash('sha1').update(bytes).digest(); -} + var onend = function() { + self.push(null) + } -var _default = sha1; -exports["default"] = _default; + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } -/***/ }), + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? 
readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear -/***/ 79618: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + this._forward() +} -"use strict"; +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + var data -var _validate = _interopRequireDefault(__nccwpck_require__(66992)); + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + this._forwarding = false +} -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) } -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. 
If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() } - return uuid; + this.emit('close') } -var _default = stringify; -exports["default"] = _default; +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() -/***/ }), + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} -/***/ 36310: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} -"use strict"; +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} +module.exports = Duplexify -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _rng = _interopRequireDefault(__nccwpck_require__(68136)); +/***/ }), -var _stringify = _interopRequireDefault(__nccwpck_require__(79618)); +/***/ 11728: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +"use strict"; -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; -let _clockseq; // Previous uuid creation time +var Buffer = (__nccwpck_require__(21867).Buffer); +var getParamBytesForAlg = __nccwpck_require__(30528); -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. 
See #189 +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock + var inputLength = signature.length; - let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + var offset = 0; + if (signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested + var rLength = signature[offset++]; + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + var rOffset = offset; + offset += rLength; - msecs += 12219292800000; // `time_low` + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for 
"s"'); + } - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` + var sLength = signature[offset++]; - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + var sOffset = offset; + offset += sLength; - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } - b[i++] = clockseq & 0xff; // `node` + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); - return buf || (0, _stringify.default)(b); -} + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); -var _default = v1; -exports["default"] = _default; + offset = paramBytes; -/***/ }), + for (var o = offset; offset < o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); -/***/ 9465: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + dst = dst.toString('base64'); + dst = base64Url(dst); -"use strict"; + return dst; +} +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } -var _v = _interopRequireDefault(__nccwpck_require__(2568)); + return padding; +} -var _md = _interopRequireDefault(__nccwpck_require__(11380)); +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; -/***/ }), + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; -/***/ 2568: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var shortLength = rsBytes < MAX_OCTET; -"use strict"; + var dst = Buffer.allocUnsafe((shortLength ? 
2 : 3) + rsBytes); + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. + dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; + return dst; +} -var _stringify = _interopRequireDefault(__nccwpck_require__(79618)); +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; -var _parse = _interopRequireDefault(__nccwpck_require__(40086)); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/***/ }), -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape +/***/ 30528: +/***/ ((module) => { - const bytes = []; +"use strict"; - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - return bytes; +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); + return result; } -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); - } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); - } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... 
value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; +var paramBytesForAlg = { + ES256: getParamSize(256), + ES384: getParamSize(384), + ES512: getParamSize(521) +}; - if (buf) { - offset = offset || 0; +function getParamBytesForAlg(alg) { + var paramBytes = paramBytesForAlg[alg]; + if (paramBytes) { + return paramBytes; + } - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } + throw new Error('Unknown algorithm "' + alg + '"'); +} - return buf; - } +module.exports = getParamBytesForAlg; - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) +/***/ }), - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support +/***/ 81205: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var once = __nccwpck_require__(1223); - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} +var noop = function() {}; -/***/ }), +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; -/***/ 86001: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; -"use strict"; +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; -var _rng = _interopRequireDefault(__nccwpck_require__(68136)); + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; -var _stringify = _interopRequireDefault(__nccwpck_require__(79618)); + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; -function v4(options, buf, offset) { - options = options || {}; + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + var onerror = function(err) { + callback.call(stream, err); + }; + var onclose = function() { + process.nextTick(onclosenexttick); + }; - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; - if (buf) { - offset = offset || 0; + var onrequest = function() { + stream.req.on('finish', onfinish); + }; - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } - return buf; - } + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; - return (0, _stringify.default)(rnds); -} +module.exports = eos; -var _default = v4; -exports["default"] = _default; /***/ }), -/***/ 38310: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 84697: +/***/ ((module, exports) => { "use strict"; +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(2568)); - -var _sha = _interopRequireDefault(__nccwpck_require__(46679)); +Object.defineProperty(exports, "__esModule", ({ value: true })); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. 
+ * @private + */ -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); -/***/ }), +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); -/***/ 66992: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} -"use strict"; +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} -var _regex = _interopRequireDefault(__nccwpck_require__(3194)); +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); -} + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, -var _default = validate; -exports["default"] = _default; + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, -/***/ }), + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, -/***/ 17780: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + /** + * Constant of CAPTURING_PHASE. 
+ * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, -"use strict"; + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, -var _validate = _interopRequireDefault(__nccwpck_require__(66992)); + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); - return parseInt(uuid.substr(14, 1), 16); -} + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, -var _default = version; -exports["default"] = _default; + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, -/***/ }), + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, -/***/ 21595: -/***/ ((__unused_webpack_module, exports) => { + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, -"use strict"; + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.deserializerMiddleware = void 0; -const deserializerMiddleware = (options, deserializer) => (next, context) => async (args) => { - const { response } = await next(args); - try { - const parsed = await deserializer(response, options); - return { - response, - output: parsed, - }; - } - catch (error) { - Object.defineProperty(error, "$response", { - value: response, - }); - if (!("$metadata" in error)) { - const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; - error.message += "\n " + hint; - } - throw error; - } -}; -exports.deserializerMiddleware = deserializerMiddleware; + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, -/***/ }), + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, -/***/ 81238: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + /** + * The flag to stop event bubbling. 
+ * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); -"use strict"; + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(21595), exports); -tslib_1.__exportStar(__nccwpck_require__(72338), exports); -tslib_1.__exportStar(__nccwpck_require__(23566), exports); + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; -/***/ }), +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); -/***/ 72338: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); -"use strict"; + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSerdePlugin = exports.serializerMiddlewareOption = exports.deserializerMiddlewareOption = void 0; -const deserializerMiddleware_1 = __nccwpck_require__(21595); -const serializerMiddleware_1 = __nccwpck_require__(23566); -exports.deserializerMiddlewareOption = { - name: "deserializerMiddleware", - step: "deserialize", - tags: ["DESERIALIZER"], - override: true, -}; -exports.serializerMiddlewareOption = { - name: "serializerMiddleware", - step: "serialize", - tags: ["SERIALIZER"], - override: true, -}; -function getSerdePlugin(config, serializer, deserializer) { +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { return { - applyToStack: (commandStack) => { - commandStack.add((0, deserializerMiddleware_1.deserializerMiddleware)(config, deserializer), exports.deserializerMiddlewareOption); - commandStack.add((0, serializerMiddleware_1.serializerMiddleware)(config, serializer), exports.serializerMiddlewareOption); + get() { + return pd(this).event[key] }, - }; + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } } -exports.getSerdePlugin = getSerdePlugin; - - -/***/ }), -/***/ 23566: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.serializerMiddleware = void 0; -const serializerMiddleware = (options, serializer) => (next, context) => async (args) => { - var _a; - const endpoint = ((_a = context.endpointV2) === null || _a === void 0 ? void 0 : _a.url) && options.urlParser - ? 
async () => options.urlParser(context.endpointV2.url) - : options.endpoint; - if (!endpoint) { - throw new Error("No valid endpoint provider available."); +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, } - const request = await serializer(args.input, { ...options, endpoint }); - return next({ - ...args, - request, - }); -}; -exports.serializerMiddleware = serializerMiddleware; - +} -/***/ }), +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } -/***/ 2404: -/***/ ((__unused_webpack_module, exports) => { + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } -"use strict"; + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.constructStack = void 0; -const getAllAliases = (name, aliases) => { - const _aliases = []; - if (name) { - _aliases.push(name); - } - if (aliases) { - for (const alias of aliases) { - _aliases.push(alias); + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); } } - return _aliases; -}; -const getMiddlewareNameWithAliases = (name, aliases) => { - return `${name || "anonymous"}${aliases && aliases.length > 0 ? ` (a.k.a. 
${aliases.join(",")})` : ""}`; -}; -const constructStack = () => { - let absoluteEntries = []; - let relativeEntries = []; - let identifyOnResolve = false; - const entriesNameSet = new Set(); - const sort = (entries) => entries.sort((a, b) => stepWeights[b.step] - stepWeights[a.step] || - priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"]); - const removeByName = (toRemove) => { - let isRemoved = false; - const filterCb = (entry) => { - const aliases = getAllAliases(entry.name, entry.aliases); - if (aliases.includes(toRemove)) { - isRemoved = true; - for (const alias of aliases) { - entriesNameSet.delete(alias); - } - return false; - } - return true; - }; - absoluteEntries = absoluteEntries.filter(filterCb); - relativeEntries = relativeEntries.filter(filterCb); - return isRemoved; - }; - const removeByReference = (toRemove) => { - let isRemoved = false; - const filterCb = (entry) => { - if (entry.middleware === toRemove) { - isRemoved = true; - for (const alias of getAllAliases(entry.name, entry.aliases)) { - entriesNameSet.delete(alias); - } - return false; - } - return true; - }; - absoluteEntries = absoluteEntries.filter(filterCb); - relativeEntries = relativeEntries.filter(filterCb); - return isRemoved; - }; - const cloneTo = (toStack) => { - var _a; - absoluteEntries.forEach((entry) => { - toStack.add(entry.middleware, { ...entry }); - }); - relativeEntries.forEach((entry) => { - toStack.addRelativeTo(entry.middleware, { ...entry }); - }); - (_a = toStack.identifyOnResolve) === null || _a === void 0 ? void 0 : _a.call(toStack, stack.identifyOnResolve()); - return toStack; - }; - const expandRelativeMiddlewareList = (from) => { - const expandedMiddlewareList = []; - from.before.forEach((entry) => { - if (entry.before.length === 0 && entry.after.length === 0) { - expandedMiddlewareList.push(entry); - } - else { - expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); - } - }); - expandedMiddlewareList.push(from); - from.after.reverse().forEach((entry) => { - if (entry.before.length === 0 && entry.after.length === 0) { - expandedMiddlewareList.push(entry); - } - else { - expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); - } - }); - return expandedMiddlewareList; - }; - const getMiddlewareList = (debug = false) => { - const normalizedAbsoluteEntries = []; - const normalizedRelativeEntries = []; - const normalizedEntriesNameMap = {}; - absoluteEntries.forEach((entry) => { - const normalizedEntry = { - ...entry, - before: [], - after: [], - }; - for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { - normalizedEntriesNameMap[alias] = normalizedEntry; - } - normalizedAbsoluteEntries.push(normalizedEntry); - }); - relativeEntries.forEach((entry) => { - const normalizedEntry = { - ...entry, - before: [], - after: [], - }; - for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { - normalizedEntriesNameMap[alias] = normalizedEntry; - } - normalizedRelativeEntries.push(normalizedEntry); - }); - normalizedRelativeEntries.forEach((entry) => { - if (entry.toMiddleware) { - const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; - if (toMiddleware === undefined) { - if (debug) { - return; - } - throw new Error(`${entry.toMiddleware} is not found when adding ` + - `${getMiddlewareNameWithAliases(entry.name, entry.aliases)} ` + - `middleware ${entry.relation} ${entry.toMiddleware}`); - } - if (entry.relation === "after") { - toMiddleware.after.push(entry); - } - if 
(entry.relation === "before") { - toMiddleware.before.push(entry); + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. 
+ * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener } + node = node.next; } - }); - const mainChain = sort(normalizedAbsoluteEntries) - .map(expandRelativeMiddlewareList) - .reduce((wholeList, expandedMiddlewareList) => { - wholeList.push(...expandedMiddlewareList); - return wholeList; - }, []); - return mainChain; - }; - const stack = { - add: (middleware, options = {}) => { - const { name, override, aliases: _aliases } = options; - const entry = { - step: "initialize", - priority: "normal", - middleware, - ...options, - }; - const aliases = getAllAliases(name, _aliases); - if (aliases.length > 0) { - if (aliases.some((alias) => entriesNameSet.has(alias))) { - if (!override) - throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); - for (const alias of aliases) { - const toOverrideIndex = absoluteEntries.findIndex((entry) => { var _a; return entry.name === alias || ((_a = entry.aliases) === null || _a === void 0 ? void 0 : _a.some((a) => a === alias)); }); - if (toOverrideIndex === -1) { - continue; - } - const toOverride = absoluteEntries[toOverrideIndex]; - if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { - throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ` + - `${toOverride.priority} priority in ${toOverride.step} step cannot ` + - `be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ` + - `${entry.priority} priority in ${entry.step} step.`); - } - absoluteEntries.splice(toOverrideIndex, 1); + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); } + } else { + prev = node; } - for (const alias of aliases) { - entriesNameSet.add(alias); + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; } } - absoluteEntries.push(entry); }, - addRelativeTo: (middleware, options) => { - const { name, override, aliases: _aliases } = options; - const entry = { - middleware, - ...options, - }; - const aliases = getAllAliases(name, _aliases); - if (aliases.length > 0) { - if (aliases.some((alias) => entriesNameSet.has(alias))) { - if (!override) - throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); - for (const alias of aliases) { - const toOverrideIndex = relativeEntries.findIndex((entry) => { var _a; return entry.name === alias || ((_a = entry.aliases) === null || _a === void 0 ? 
void 0 : _a.some((a) => a === alias)); }); - if (toOverrideIndex === -1) { - continue; - } - const toOverride = relativeEntries[toOverrideIndex]; - if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { - throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ` + - `${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden ` + - `by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} ` + - `"${entry.toMiddleware}" middleware.`); - } - relativeEntries.splice(toOverrideIndex, 1); - } + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? 
CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); } - for (const alias of aliases) { - entriesNameSet.add(alias); + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); } + } else { + prev = node; } - relativeEntries.push(entry); - }, - clone: () => cloneTo((0, exports.constructStack)()), - use: (plugin) => { - plugin.applyToStack(stack); - }, - remove: (toRemove) => { - if (typeof toRemove === "string") - return removeByName(toRemove); - else - return removeByReference(toRemove); - }, - removeByTag: (toRemove) => { - let isRemoved = false; - const filterCb = (entry) => { - const { tags, name, aliases: _aliases } = entry; - if (tags && tags.includes(toRemove)) { - const aliases = getAllAliases(name, _aliases); - for (const alias of aliases) { - entriesNameSet.delete(alias); + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? 
node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); } - isRemoved = true; - return false; } - return true; - }; - absoluteEntries = absoluteEntries.filter(filterCb); - relativeEntries = relativeEntries.filter(filterCb); - return isRemoved; - }, - concat: (from) => { - var _a, _b; - const cloned = cloneTo((0, exports.constructStack)()); - cloned.use(from); - cloned.identifyOnResolve(identifyOnResolve || cloned.identifyOnResolve() || ((_b = (_a = from.identifyOnResolve) === null || _a === void 0 ? void 0 : _a.call(from)) !== null && _b !== void 0 ? _b : false)); - return cloned; - }, - applyToStack: cloneTo, - identify: () => { - return getMiddlewareList(true).map((mw) => { - var _a; - const step = (_a = mw.step) !== null && _a !== void 0 ? _a : mw.relation + - " " + - mw.toMiddleware; - return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; - }); - }, - identifyOnResolve(toggle) { - if (typeof toggle === "boolean") - identifyOnResolve = toggle; - return identifyOnResolve; - }, - resolve: (handler, context) => { - for (const middleware of getMiddlewareList() - .map((entry) => entry.middleware) - .reverse()) { - handler = middleware(handler, context); + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); } - if (identifyOnResolve) { - console.log(stack.identify()); + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break } - return handler; - }, - }; - return stack; -}; -exports.constructStack = constructStack; -const stepWeights = { - initialize: 5, - serialize: 4, - build: 3, - finalizeRequest: 2, - deserialize: 1, -}; -const priorityWeights = { - high: 3, - normal: 2, - low: 1, + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, }; +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. 
+if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports["default"] = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map + /***/ }), -/***/ 97911: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 38171: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(2404), exports); +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; +var defineProperty = Object.defineProperty; +var gOPD = Object.getOwnPropertyDescriptor; -/***/ }), +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } -/***/ 54766: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return toStr.call(arr) === '[object Array]'; +}; -"use strict"; +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.loadConfig = void 0; -const property_provider_1 = __nccwpck_require__(79721); -const fromEnv_1 = __nccwpck_require__(15606); -const fromSharedConfigFiles_1 = __nccwpck_require__(45784); -const fromStatic_1 = __nccwpck_require__(23091); -const loadConfig = ({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, property_provider_1.memoize)((0, property_provider_1.chain)((0, fromEnv_1.fromEnv)(environmentVariableSelector), (0, fromSharedConfigFiles_1.fromSharedConfigFiles)(configFileSelector, configuration), (0, fromStatic_1.fromStatic)(defaultValue))); -exports.loadConfig = loadConfig; + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; + } + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. + var key; + for (key in obj) { /**/ } -/***/ }), + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; -/***/ 15606: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target +var setProperty = function setProperty(target, options) { + if (defineProperty && options.name === '__proto__') { + defineProperty(target, options.name, { + enumerable: true, + configurable: true, + value: options.newValue, + writable: true + }); + } else { + target[options.name] = options.newValue; + } +}; -"use strict"; +// Return undefined instead of __proto__ if '__proto__' is not an own property +var getProperty = function getProperty(obj, name) { + if (name === '__proto__') { + if (!hasOwn.call(obj, name)) { + return void 0; + } else if (gOPD) { + // In early versions of node, obj['__proto__'] is buggy when obj has + // __proto__ as an own property. 
Object.getOwnPropertyDescriptor() works. + return gOPD(obj, name).value; + } + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromEnv = void 0; -const property_provider_1 = __nccwpck_require__(79721); -const fromEnv = (envVarSelector) => async () => { - try { - const config = envVarSelector(process.env); - if (config === undefined) { - throw new Error(); - } - return config; - } - catch (e) { - throw new property_provider_1.CredentialsProviderError(e.message || `Cannot load config from environment variables with getter: ${envVarSelector}`); - } + return obj[name]; +}; + +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone; + var target = arguments[0]; + var i = 1; + var length = arguments.length; + var deep = false; + + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } + if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { + target = {}; + } + + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = getProperty(target, name); + copy = getProperty(options, name); + + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? src : {}; + } + + // Never move original objects, clone them + setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); + + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + setProperty(target, { name: name, newValue: copy }); + } + } + } + } + } + + // Return the modified object + return target; }; -exports.fromEnv = fromEnv; /***/ }), -/***/ 45784: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 28206: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromSharedConfigFiles = void 0; -const property_provider_1 = __nccwpck_require__(79721); -const shared_ini_file_loader_1 = __nccwpck_require__(43507); -const fromSharedConfigFiles = (configSelector, { preferredFile = "config", ...init } = {}) => async () => { - const profile = (0, shared_ini_file_loader_1.getProfileName)(init); - const { configFile, credentialsFile } = await (0, shared_ini_file_loader_1.loadSharedConfigFiles)(init); - const profileFromCredentials = credentialsFile[profile] || {}; - const profileFromConfig = configFile[profile] || {}; - const mergedProfile = preferredFile === "config" - ? { ...profileFromCredentials, ...profileFromConfig } - : { ...profileFromConfig, ...profileFromCredentials }; - try { - const cfgFile = preferredFile === "config" ? 
configFile : credentialsFile; - const configValue = configSelector(mergedProfile, cfgFile); - if (configValue === undefined) { - throw new Error(); - } - return configValue; + +// do not edit .js files directly - edit src/index.jst + + + +module.exports = function equal(a, b) { + if (a === b) return true; + + if (a && b && typeof a == 'object' && typeof b == 'object') { + if (a.constructor !== b.constructor) return false; + + var length, i, keys; + if (Array.isArray(a)) { + length = a.length; + if (length != b.length) return false; + for (i = length; i-- !== 0;) + if (!equal(a[i], b[i])) return false; + return true; } - catch (e) { - throw new property_provider_1.CredentialsProviderError(e.message || `Cannot load config for profile ${profile} in SDK configuration files with getter: ${configSelector}`); + + + + if (a.constructor === RegExp) return a.source === b.source && a.flags === b.flags; + if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf(); + if (a.toString !== Object.prototype.toString) return a.toString() === b.toString(); + + keys = Object.keys(a); + length = keys.length; + if (length !== Object.keys(b).length) return false; + + for (i = length; i-- !== 0;) + if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false; + + for (i = length; i-- !== 0;) { + var key = keys[i]; + + if (!equal(a[key], b[key])) return false; } + + return true; + } + + // true if both NaN, false otherwise + return a!==a && b!==b; }; -exports.fromSharedConfigFiles = fromSharedConfigFiles; /***/ }), -/***/ 23091: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 30969: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromStatic = void 0; -const property_provider_1 = __nccwpck_require__(79721); -const isFunction = (func) => typeof func === "function"; -const fromStatic = (defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, property_provider_1.fromStatic)(defaultValue); -exports.fromStatic = fromStatic; +module.exports = function (data, opts) { + if (!opts) opts = {}; + if (typeof opts === 'function') opts = { cmp: opts }; + var cycles = (typeof opts.cycles === 'boolean') ? opts.cycles : false; -/***/ }), + var cmp = opts.cmp && (function (f) { + return function (node) { + return function (a, b) { + var aobj = { key: a, value: node[a] }; + var bobj = { key: b, value: node[b] }; + return f(aobj, bobj); + }; + }; + })(opts.cmp); -/***/ 33461: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var seen = []; + return (function stringify (node) { + if (node && node.toJSON && typeof node.toJSON === 'function') { + node = node.toJSON(); + } -"use strict"; + if (node === undefined) return; + if (typeof node == 'number') return isFinite(node) ? 
'' + node : 'null'; + if (typeof node !== 'object') return JSON.stringify(node); -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(54766), exports); + var i, out; + if (Array.isArray(node)) { + out = '['; + for (i = 0; i < node.length; i++) { + if (i) out += ','; + out += stringify(node[i]) || 'null'; + } + return out + ']'; + } + + if (node === null) return 'null'; + + if (seen.indexOf(node) !== -1) { + if (cycles) return JSON.stringify('__cycle__'); + throw new TypeError('Converting circular structure to JSON'); + } + + var seenIndex = seen.push(node) - 1; + var keys = Object.keys(node).sort(cmp && cmp(node)); + out = ''; + for (i = 0; i < keys.length; i++) { + var key = keys[i]; + var value = stringify(node[key]); + + if (!value) continue; + if (out) out += ','; + out += JSON.stringify(key) + ':' + value; + } + seen.splice(seenIndex, 1); + return '{' + out + '}'; + })(data); +}; /***/ }), -/***/ 33946: -/***/ ((__unused_webpack_module, exports) => { +/***/ 12603: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NODEJS_TIMEOUT_ERROR_CODES = void 0; -exports.NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; +const validator = __nccwpck_require__(61739); +const XMLParser = __nccwpck_require__(42380); +const XMLBuilder = __nccwpck_require__(80660); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} /***/ }), -/***/ 70508: +/***/ 38280: /***/ ((__unused_webpack_module, exports) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getTransformedHeaders = void 0; -const getTransformedHeaders = (headers) => { - const transformedHeaders = {}; - for (const name of Object.keys(headers)) { - const headerValues = headers[name]; - transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); } - return transformedHeaders; + matches.push(allmatches); + match = regex.exec(string); + } + return matches; }; -exports.getTransformedHeaders = getTransformedHeaders; +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; -/***/ }), +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; -/***/ 20258: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; -"use strict"; +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(96948), exports); -tslib_1.__exportStar(__nccwpck_require__(46999), exports); -tslib_1.__exportStar(__nccwpck_require__(81030), exports); +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; /***/ }), -/***/ 96948: +/***/ 61739: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NodeHttpHandler = exports.DEFAULT_REQUEST_TIMEOUT = void 0; -const protocol_http_1 = __nccwpck_require__(64418); -const querystring_builder_1 = __nccwpck_require__(32768); -const http_1 = __nccwpck_require__(13685); -const https_1 = __nccwpck_require__(95687); -const constants_1 = __nccwpck_require__(33946); -const get_transformed_headers_1 = __nccwpck_require__(70508); -const set_connection_timeout_1 = __nccwpck_require__(25545); -const set_socket_keep_alive_1 = __nccwpck_require__(83751); -const set_socket_timeout_1 = __nccwpck_require__(42618); -const write_request_body_1 = __nccwpck_require__(73766); -exports.DEFAULT_REQUEST_TIMEOUT = 0; -class NodeHttpHandler { - static create(instanceOrOptions) { - if (typeof (instanceOrOptions === null || instanceOrOptions === void 0 ? void 0 : instanceOrOptions.handle) === "function") { - return instanceOrOptions; - } - return new NodeHttpHandler(instanceOrOptions); - } - constructor(options) { - this.metadata = { handlerProtocol: "http/1.1" }; - this.configProvider = new Promise((resolve, reject) => { - if (typeof options === "function") { - options() - .then((_options) => { - resolve(this.resolveDefaultConfig(_options)); - }) - .catch(reject); - } - else { - resolve(this.resolveDefaultConfig(options)); - } - }); - } - resolveDefaultConfig(options) { - const { requestTimeout, connectionTimeout, socketTimeout, httpAgent, httpsAgent } = options || {}; - const keepAlive = true; - const maxSockets = 50; - return { - connectionTimeout, - requestTimeout: requestTimeout !== null && requestTimeout !== void 0 ? requestTimeout : socketTimeout, - httpAgent: httpAgent || new http_1.Agent({ keepAlive, maxSockets }), - httpsAgent: httpsAgent || new https_1.Agent({ keepAlive, maxSockets }), - }; - } - destroy() { - var _a, _b, _c, _d; - (_b = (_a = this.config) === null || _a === void 0 ? void 0 : _a.httpAgent) === null || _b === void 0 ? void 0 : _b.destroy(); - (_d = (_c = this.config) === null || _c === void 0 ? void 0 : _c.httpsAgent) === null || _d === void 0 ? 
void 0 : _d.destroy(); - } - async handle(request, { abortSignal } = {}) { - if (!this.config) { - this.config = await this.configProvider; - } - return new Promise((_resolve, _reject) => { - var _a, _b; - let writeRequestBodyPromise = undefined; - const resolve = async (arg) => { - await writeRequestBodyPromise; - _resolve(arg); - }; - const reject = async (arg) => { - await writeRequestBodyPromise; - _reject(arg); - }; - if (!this.config) { - throw new Error("Node HTTP request handler config is not resolved"); - } - if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { - const abortError = new Error("Request aborted"); - abortError.name = "AbortError"; - reject(abortError); - return; - } - const isSSL = request.protocol === "https:"; - const queryString = (0, querystring_builder_1.buildQueryString)(request.query || {}); - let auth = undefined; - if (request.username != null || request.password != null) { - const username = (_a = request.username) !== null && _a !== void 0 ? _a : ""; - const password = (_b = request.password) !== null && _b !== void 0 ? _b : ""; - auth = `${username}:${password}`; - } - let path = request.path; - if (queryString) { - path += `?${queryString}`; - } - if (request.fragment) { - path += `#${request.fragment}`; - } - const nodeHttpsOptions = { - headers: request.headers, - host: request.hostname, - method: request.method, - path, - port: request.port, - agent: isSSL ? this.config.httpsAgent : this.config.httpAgent, - auth, - }; - const requestFunc = isSSL ? https_1.request : http_1.request; - const req = requestFunc(nodeHttpsOptions, (res) => { - const httpResponse = new protocol_http_1.HttpResponse({ - statusCode: res.statusCode || -1, - reason: res.statusMessage, - headers: (0, get_transformed_headers_1.getTransformedHeaders)(res.headers), - body: res, - }); - resolve({ response: httpResponse }); - }); - req.on("error", (err) => { - if (constants_1.NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { - reject(Object.assign(err, { name: "TimeoutError" })); - } - else { - reject(err); - } - }); - (0, set_connection_timeout_1.setConnectionTimeout)(req, reject, this.config.connectionTimeout); - (0, set_socket_timeout_1.setSocketTimeout)(req, reject, this.config.requestTimeout); - if (abortSignal) { - abortSignal.onabort = () => { - req.abort(); - const abortError = new Error("Request aborted"); - abortError.name = "AbortError"; - reject(abortError); - }; - } - const httpAgent = nodeHttpsOptions.agent; - if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { - (0, set_socket_keep_alive_1.setSocketKeepAlive)(req, { - keepAlive: httpAgent.keepAlive, - keepAliveMsecs: httpAgent.keepAliveMsecs, - }); - } - writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, this.config.requestTimeout).catch(_reject); - }); - } - updateHttpClientConfig(key, value) { - this.config = undefined; - this.configProvider = this.configProvider.then((config) => { - return { - ...config, - [key]: value, - }; - }); - } - httpHandlerConfigs() { - var _a; - return (_a = this.config) !== null && _a !== void 0 ? 
_a : {}; - } -} -exports.NodeHttpHandler = NodeHttpHandler; +const util = __nccwpck_require__(38280); -/***/ }), +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; -/***/ 5771: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); -"use strict"; + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NodeHttp2ConnectionManager = void 0; -const tslib_1 = __nccwpck_require__(4351); -const http2_1 = tslib_1.__importDefault(__nccwpck_require__(85158)); -const node_http2_connection_pool_1 = __nccwpck_require__(95157); -class NodeHttp2ConnectionManager { - constructor(config) { - this.sessionCache = new Map(); - this.config = config; - if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { - throw new RangeError("maxConcurrency must be greater than zero."); + //indicates that the root tag has been closed (aka. depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; } - } - lease(requestContext, connectionConfiguration) { - const url = this.getUrlString(requestContext); - const existingPool = this.sessionCache.get(url); - if (existingPool) { - const existingSession = existingPool.poll(); - if (existingSession && !this.config.disableConcurrency) { - return existingSession; - } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; } - const session = http2_1.default.connect(url); - if (this.config.maxConcurrency) { - session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { - if (err) { - throw new Error("Fail to set maxConcurrentStreams to " + - this.config.maxConcurrency + - "when creating new session for " + - requestContext.destination.toString()); - } - }); + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; } - session.unref(); - const destroySessionCb = () => { - session.destroy(); - this.deleteSession(url, session); - }; - session.on("goaway", destroySessionCb); - session.on("error", destroySessionCb); - session.on("frameError", destroySessionCb); - session.on("close", () => this.deleteSession(url, session)); - if (connectionConfiguration.requestTimeout) { - 
session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); } - const connectionPool = this.sessionCache.get(url) || new node_http2_connection_pool_1.NodeHttp2ConnectionPool(); - connectionPool.offerLast(session); - this.sessionCache.set(url, connectionPool); - return session; - } - deleteSession(authority, session) { - const existingConnectionPool = this.sessionCache.get(authority); - if (!existingConnectionPool) { - return; + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); } - if (!existingConnectionPool.contains(session)) { - return; + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... 
+ if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; } - existingConnectionPool.remove(session); - this.sessionCache.set(authority, existingConnectionPool); - } - release(requestContext, session) { - var _a; - const cacheKey = this.getUrlString(requestContext); - (_a = this.sessionCache.get(cacheKey)) === null || _a === void 0 ? void 0 : _a.offerLast(session); - } - destroy() { - for (const [key, connectionPool] of this.sessionCache) { - for (const session of connectionPool) { - if (!session.destroyed) { - session.destroy(); - } - connectionPool.remove(session); + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; } - this.sessionCache.delete(key); + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); } - setMaxConcurrentStreams(maxConcurrentStreams) { - if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { - throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; } - this.config.maxConcurrency = maxConcurrentStreams; + } } - setDisableConcurrentStreams(disableConcurrentStreams) { - this.config.disableConcurrency = disableConcurrentStreams; + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } } - getUrlString(request) { - return request.destination.toString(); + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; } -exports.NodeHttp2ConnectionManager = NodeHttp2ConnectionManager; +/** + * Select all the attributes whether valid or invalid. + */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); -/***/ }), +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" -/***/ 95157: -/***/ ((__unused_webpack_module, exports) => { +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); -"use strict"; + //if(attrStr.trim().length === 0) return true; //empty string -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NodeHttp2ConnectionPool = void 0; -class NodeHttp2ConnectionPool { - constructor(sessions) { - this.sessions = []; - this.sessions = sessions !== null && sessions !== void 0 ? 
sessions : []; - } - poll() { - if (this.sessions.length > 0) { - return this.sessions.shift(); - } - } - offerLast(session) { - this.sessions.push(session); - } - contains(session) { - return this.sessions.includes(session); - } - remove(session) { - this.sessions = this.sessions.filter((s) => s !== session); + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); } - [Symbol.iterator]() { - return this.sessions[Symbol.iterator](); + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); } - destroy(connection) { - for (const session of this.sessions) { - if (session === connection) { - if (!session.destroyed) { - session.destroy(); - } - } - } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. 
+ attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); } -} -exports.NodeHttp2ConnectionPool = NodeHttp2ConnectionPool; + } + return true; +} -/***/ }), +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} -/***/ 46999: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} -"use strict"; +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NodeHttp2Handler = void 0; -const protocol_http_1 = __nccwpck_require__(64418); -const querystring_builder_1 = __nccwpck_require__(32768); -const http2_1 = __nccwpck_require__(85158); -const get_transformed_headers_1 = __nccwpck_require__(70508); -const node_http2_connection_manager_1 = __nccwpck_require__(5771); -const write_request_body_1 = __nccwpck_require__(73766); -class NodeHttp2Handler { - static create(instanceOrOptions) { - if (typeof (instanceOrOptions === null || instanceOrOptions === void 0 ? void 0 : instanceOrOptions.handle) === "function") { - return instanceOrOptions; - } - return new NodeHttp2Handler(instanceOrOptions); - } - constructor(options) { - this.metadata = { handlerProtocol: "h2" }; - this.connectionManager = new node_http2_connection_manager_1.NodeHttp2ConnectionManager({}); - this.configProvider = new Promise((resolve, reject) => { - if (typeof options === "function") { - options() - .then((opts) => { - resolve(opts || {}); - }) - .catch(reject); - } - else { - resolve(options || {}); - } - }); - } - destroy() { - this.connectionManager.destroy(); - } - async handle(request, { abortSignal } = {}) { - if (!this.config) { - this.config = await this.configProvider; - this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); - if (this.config.maxConcurrentStreams) { - this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); - } - } - const { requestTimeout, disableConcurrentStreams } = this.config; - return new Promise((_resolve, _reject) => { - var _a, _b, _c; - let fulfilled = false; - let writeRequestBodyPromise = undefined; - const resolve = async (arg) => { - await writeRequestBodyPromise; - _resolve(arg); - }; - const reject = async (arg) => { - await writeRequestBodyPromise; - _reject(arg); - }; - if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { - fulfilled = true; - const abortError = new Error("Request aborted"); - abortError.name = "AbortError"; - reject(abortError); - return; - } - const { hostname, method, port, protocol, query } = request; - let auth = ""; - if (request.username != null || request.password != null) { - const username = (_a = request.username) !== null && _a !== void 0 ? 
_a : ""; - const password = (_b = request.password) !== null && _b !== void 0 ? _b : ""; - auth = `${username}:${password}@`; - } - const authority = `${protocol}//${auth}${hostname}${port ? `:${port}` : ""}`; - const requestContext = { destination: new URL(authority) }; - const session = this.connectionManager.lease(requestContext, { - requestTimeout: (_c = this.config) === null || _c === void 0 ? void 0 : _c.sessionTimeout, - disableConcurrentStreams: disableConcurrentStreams || false, - }); - const rejectWithDestroy = (err) => { - if (disableConcurrentStreams) { - this.destroySession(session); - } - fulfilled = true; - reject(err); - }; - const queryString = (0, querystring_builder_1.buildQueryString)(query || {}); - let path = request.path; - if (queryString) { - path += `?${queryString}`; - } - if (request.fragment) { - path += `#${request.fragment}`; - } - const req = session.request({ - ...request.headers, - [http2_1.constants.HTTP2_HEADER_PATH]: path, - [http2_1.constants.HTTP2_HEADER_METHOD]: method, - }); - session.ref(); - req.on("response", (headers) => { - const httpResponse = new protocol_http_1.HttpResponse({ - statusCode: headers[":status"] || -1, - headers: (0, get_transformed_headers_1.getTransformedHeaders)(headers), - body: req, - }); - fulfilled = true; - resolve({ response: httpResponse }); - if (disableConcurrentStreams) { - session.close(); - this.connectionManager.deleteSession(authority, session); - } - }); - if (requestTimeout) { - req.setTimeout(requestTimeout, () => { - req.close(); - const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); - timeoutError.name = "TimeoutError"; - rejectWithDestroy(timeoutError); - }); - } - if (abortSignal) { - abortSignal.onabort = () => { - req.close(); - const abortError = new Error("Request aborted"); - abortError.name = "AbortError"; - rejectWithDestroy(abortError); - }; - } - req.on("frameError", (type, code, id) => { - rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); - }); - req.on("error", rejectWithDestroy); - req.on("aborted", () => { - rejectWithDestroy(new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`)); - }); - req.on("close", () => { - session.unref(); - if (disableConcurrentStreams) { - session.destroy(); - } - if (!fulfilled) { - rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); - } - }); - writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, requestTimeout); - }); - } - updateHttpClientConfig(key, value) { - this.config = undefined; - this.configProvider = this.configProvider.then((config) => { - return { - ...config, - [key]: value, - }; - }); - } - httpHandlerConfigs() { - var _a; - return (_a = this.config) !== null && _a !== void 0 ? 
_a : {}; - } - destroySession(session) { - if (!session.destroyed) { - session.destroy(); - } - } +function validateAttrName(attrName) { + return util.isName(attrName); } -exports.NodeHttp2Handler = NodeHttp2Handler; +// const startsWithXML = /^xml/i; -/***/ }), +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} -/***/ 25545: -/***/ ((__unused_webpack_module, exports) => { +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, -"use strict"; + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.setConnectionTimeout = void 0; -const setConnectionTimeout = (request, reject, timeoutInMs = 0) => { - if (!timeoutInMs) { - return; - } - const timeoutId = setTimeout(() => { - request.destroy(); - reject(Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { - name: "TimeoutError", - })); - }, timeoutInMs); - request.on("socket", (socket) => { - if (socket.connecting) { - socket.on("connect", () => { - clearTimeout(timeoutId); - }); - } - else { - clearTimeout(timeoutId); - } - }); -}; -exports.setConnectionTimeout = setConnectionTimeout; +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} /***/ }), -/***/ 83751: -/***/ ((__unused_webpack_module, exports) => { +/***/ 80660: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.setSocketKeepAlive = void 0; -const setSocketKeepAlive = (request, { keepAlive, keepAliveMsecs }) => { - if (keepAlive !== true) { - return; - } - request.on("socket", (socket) => { - socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); - }); -}; -exports.setSocketKeepAlive = setSocketKeepAlive; +//parse Empty Node as self closing node +const buildFromOrderedJs = __nccwpck_require__(72462); +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; -/***/ }), +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } -/***/ 42618: -/***/ 
((__unused_webpack_module, exports) => { + this.processTextOrObjNode = processTextOrObjNode -"use strict"; + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.setSocketTimeout = void 0; -const setSocketTimeout = (request, reject, timeoutInMs = 0) => { - request.setTimeout(timeoutInMs, () => { - request.destroy(); - reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); - }); +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; + } }; -exports.setSocketTimeout = setSocketTimeout; +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if (typeof jObj[key] === 'undefined') { + // supress undefined node + } else if (jObj[key] === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar;
+          else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+          // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+        } else if (typeof item === 'object') {
+          if(this.options.oneListGroup ){
+            listTagVal += this.j2x(item, level + 1).val;
+          }else{
+            listTagVal += this.processTextOrObjNode(item, key, level)
+          }
+        } else {
+          listTagVal += this.buildTextValNode(item, key, '', level);
+        }
+      }
+      if(this.options.oneListGroup){
+        listTagVal = this.buildObjectNode(listTagVal, key, '', level);
+      }
+      val += listTagVal;
+    } else {
+      //nested node
+      if (this.options.attributesGroupName && key === this.options.attributesGroupName) {
+        const Ks = Object.keys(jObj[key]);
+        const L = Ks.length;
+        for (let j = 0; j < L; j++) {
+          attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]);
+        }
+      } else {
+        val += this.processTextOrObjNode(jObj[key], key, level)
+      }
+    }
+  }
+  return {attrStr: attrStr, val: val};
+};

-/***/ }),

+Builder.prototype.buildAttrPairStr = function(attrName, val){
+  val = this.options.attributeValueProcessor(attrName, '' + val);
+  val = this.replaceEntitiesValue(val);
+  if (this.options.suppressBooleanAttributes && val === "true") {
+    return ' ' + attrName;
+  } else return ' ' + attrName + '="' + val + '"';
+}

-/***/ 23211:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

+function processTextOrObjNode (object, key, level) {
+  const result = this.j2x(object, level + 1);
+  if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) {
+    return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level);
+  } else {
+    return this.buildObjectNode(result.val, key, result.attrStr, level);
+  }
+}

-"use strict";

+Builder.prototype.buildObjectNode = function(val, key, attrStr, level) {
+  if(val === ""){
+    if(key[0] === "?") return  this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
+    else {
+      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+    }
+  }else{

-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.Collector = void 0;
-const stream_1 = __nccwpck_require__(12781);
-class Collector extends stream_1.Writable {
-    constructor() {
-        super(...arguments);
-        this.bufferedBytes = [];
+    let tagEndExp = '</' + key + this.tagEndChar;
+    let piClosingChar = "";
+
+    if(key[0] === "?") {
+      piClosingChar = "?";
+      tagEndExp = '';
+    }
+
+    // attrStr is an empty string in case the attribute came as undefined or null
+    if ((attrStr || attrStr === '') && val.indexOf('<') === -1) {
+      return ( this.indentate(level) + '<' + key + attrStr + piClosingChar + '>' + val + tagEndExp );
+    } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) {
+      return  this.indentate(level) + `<!--${val}-->` + this.newLine;
+    }else {
+      return (
+        this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar +
+        val +
+        this.indentate(level) + tagEndExp    );
+    }
+  }
+}
-exports.Collector = Collector;
+Builder.prototype.closeTag = function(key){
+  let closeTag = "";
+  if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired
+    if(!this.options.suppressUnpairedNode) closeTag = "/"
+  }else if(this.options.suppressEmptyNode){ //empty
+    closeTag = "/";
+  }else{
+    closeTag = `></${key}`;
+  }
+  return closeTag;
+}

+Builder.prototype.buildTextValNode = function(val, key, attrStr, level) {
+  if (this.options.cdataPropName !== false && key === this.options.cdataPropName) {
+    return  this.indentate(level) + `<![CDATA[${val}]]>` + this.newLine;
+  }else if (this.options.commentPropName !== false && key === this.options.commentPropName) {
+    return  this.indentate(level) + `<!--${val}-->` + this.newLine;
+  }else if(key[0] === "?") {//PI tag
+    return  this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i new Promise((resolve, reject) => { - const collector = new collector_1.Collector(); - stream.pipe(collector); - stream.on("error", (err) => { - collector.end(); - reject(err); - }); - collector.on("error", reject); - collector.on("finish", function () { - const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); - resolve(bytes); - }); -}); -exports.streamCollector = streamCollector; +function indentate(level) { + return this.options.indentBy.repeat(level); +} + +function isAttribute(name /*, options*/) { + if (name.startsWith(this.options.attributeNamePrefix)) { + return name.substr(this.attrPrefixLen); + } else { + return false; + } +} + +module.exports = Builder; /***/ }), -/***/ 73766: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 72462: +/***/ ((module) => { -"use strict"; +const EOL = "\n"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.writeRequestBody = void 0; -const stream_1 = __nccwpck_require__(12781); -const MIN_WAIT_TIME = 1000; -async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { - var _a; - const headers = (_a = request.headers) !== null && _a !== void 0 ? _a : {}; - const expect = headers["Expect"] || headers["expect"]; - let timeoutId = -1; - let hasError = false; - if (expect === "100-continue") { - await Promise.race([ - new Promise((resolve) => { - timeoutId = Number(setTimeout(resolve, Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); - }), - new Promise((resolve) => { - httpRequest.on("continue", () => { - clearTimeout(timeoutId); - resolve(); - }); - httpRequest.on("error", () => { - hasError = true; - clearTimeout(timeoutId); - resolve(); - }); - }), - ]); - } - if (!hasError) { - writeBody(httpRequest, request.body); - } -} -exports.writeRequestBody = writeRequestBody; -function writeBody(httpRequest, body) { - if (body instanceof stream_1.Readable) { - body.pipe(httpRequest); - } - else if (body) { - httpRequest.end(Buffer.from(body)); - } - else { - httpRequest.end(); +/** + * + * @param {array} jArray + * @param {any} options + * @returns + */ +function toXml(jArray, options) { + let indentation = ""; + if (options.format && options.indentBy.length > 0) { + indentation = EOL; } + return arrToStr(jArray, options, "", indentation); } +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; -/***/ }), - -/***/ 32768: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.buildQueryString = void 0; -const util_uri_escape_1 = __nccwpck_require__(54197); -function buildQueryString(query) { - const parts = []; - for (let key of Object.keys(query).sort()) { - const value = query[key]; - key = (0, util_uri_escape_1.escapeUri)(key); - if (Array.isArray(value)) { - for (let i = 0, iLen = value.length; i < iLen; 
i++) {
-                parts.push(`${key}=${(0, util_uri_escape_1.escapeUri)(value[i])}`);
+    if (tagName === options.textNodeName) {
+      let tagText = tagObj[tagName];
+      if (!isStopNode(newJPath, options)) {
+        tagText = options.tagValueProcessor(tagName, tagText);
+        tagText = replaceEntitiesValue(tagText, options);
+      }
+      if (isPreviousElementTag) {
+        xmlStr += indentation;
+      }
+      xmlStr += tagText;
+      isPreviousElementTag = false;
+      continue;
+    } else if (tagName === options.cdataPropName) {
+      if (isPreviousElementTag) {
+        xmlStr += indentation;
       }
+      xmlStr += `<![CDATA[${tagObj[tagName][0][options.textNodeName]}]]>`;
+      isPreviousElementTag = false;
+      continue;
+    } else if (tagName === options.commentPropName) {
+      xmlStr += indentation + `<!--${tagObj[tagName][0][options.textNodeName]}-->`;
+      isPreviousElementTag = true;
+      continue;
+    } else if (tagName[0] === "?") {
+      const attStr = attr_to_str(tagObj[":@"], options);
+      const tempInd = tagName === "?xml" ? "" : indentation;
+      let piTextNodeName = tagObj[tagName][0][options.textNodeName];
+      piTextNodeName = piTextNodeName.length !== 0 ? " " + piTextNodeName : ""; //remove extra spacing
+      xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`;
+      isPreviousElementTag = true;
+      continue;
+    }
-    else {
-      let qsEntry = key;
-      if (value || typeof value === "string") {
-        qsEntry += `=${(0, util_uri_escape_1.escapeUri)(value)}`;
+    let newIdentation = indentation;
+    if (newIdentation !== "") {
+      newIdentation += options.indentBy;
+    }
+    const attStr = attr_to_str(tagObj[":@"], options);
+    const tagStart = indentation + `<${tagName}${attStr}`;
+    const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation);
+    if (options.unpairedTags.indexOf(tagName) !== -1) {
+      if (options.suppressUnpairedNode) xmlStr += tagStart + ">";
+      else xmlStr += tagStart + "/>";
+    } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) {
+      xmlStr += tagStart + "/>";
+    } else if (tagValue && tagValue.endsWith(">")) {
+      xmlStr += tagStart + `>${tagValue}${indentation}</${tagName}>`;
+    } else {
+      xmlStr += tagStart + ">";
+      if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("</"))) {
+        xmlStr += indentation + options.indentBy + tagValue + indentation;
+      } else {
+        xmlStr += tagValue;
+      }
+      xmlStr += `</${tagName}>`;
+    }
+    isPreviousElementTag = true;
  }
-    return parts.join("&");
-}
-exports.buildQueryString = buildQueryString;
+  return xmlStr;
+}

-/***/ }),
+function propName(obj) {
+  const keys = Object.keys(obj);
+  for (let i = 0; i < keys.length; i++) {
+    const key = keys[i];
+    if (key !== ":@") return key;
+  }
+}

-/***/ 63936:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+function attr_to_str(attrMap, options) {
+  let attrStr = "";
+  if (attrMap && !options.ignoreAttributes) {
+    for (let attr in attrMap) {
+      let attrVal = options.attributeValueProcessor(attr, attrMap[attr]);
+      attrVal = replaceEntitiesValue(attrVal, options);
+      if (attrVal === true && options.suppressBooleanAttributes) {
+        attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`;
+      } else {
+        attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`;
+      }
+    }
+  }
+  return attrStr;
+}

-"use strict";
+function isStopNode(jPath, options) {
+  jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1);
+  let tagName = jPath.substr(jPath.lastIndexOf(".") + 1);
+  for (let index in options.stopNodes) {
+    if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CredentialsProviderError = void 0; -const ProviderError_1 = __nccwpck_require__(23324); -class CredentialsProviderError extends ProviderError_1.ProviderError { - constructor(message, tryNextLink = true) { - super(message, tryNextLink); - this.tryNextLink = tryNextLink; - this.name = "CredentialsProviderError"; - Object.setPrototypeOf(this, CredentialsProviderError.prototype); +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } } + return textValue; } -exports.CredentialsProviderError = CredentialsProviderError; +module.exports = toXml; /***/ }), -/***/ 23324: -/***/ ((__unused_webpack_module, exports) => { +/***/ 6072: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +const util = __nccwpck_require__(38280); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ProviderError = void 0; -class ProviderError extends Error { - constructor(message, tryNextLink = true) { - super(message); - this.tryNextLink = tryNextLink; - this.name = "ProviderError"; - Object.setPrototypeOf(this, ProviderError.prototype); - } - static from(error, tryNextLink = true) { - return Object.assign(new this(error.message, tryNextLink), error); +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); } + return {entities, i}; } -exports.ProviderError = ProviderError; +function readEntityExp(xmlData,i){ + //External entities are not supported + // -/***/ }), - -/***/ 50429: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + //Parameter entities are not supported + // -"use strict"; + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TokenProviderError = void 0; -const ProviderError_1 = __nccwpck_require__(23324); -class TokenProviderError extends ProviderError_1.ProviderError { - constructor(message, tryNextLink = true) { - super(message, tryNextLink); - this.tryNextLink = tryNextLink; - this.name = "TokenProviderError"; - Object.setPrototypeOf(this, TokenProviderError.prototype); + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < 
xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false } -exports.TokenProviderError = TokenProviderError; +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} -/***/ }), - -/***/ 45079: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.chain = void 0; -const ProviderError_1 = __nccwpck_require__(23324); -const chain = (...providers) => async () => { - if (providers.length === 0) { - throw new ProviderError_1.ProviderError("No providers in chain"); - } - let lastProviderError; - for (const provider of providers) { - try { - const credentials = await provider(); - return credentials; - } - catch (err) { - lastProviderError = err; - if (err === null || err === void 0 ? 
void 0 : err.tryNextLink) { - continue; - } - throw err; - } - } - throw lastProviderError; -}; -exports.chain = chain; +module.exports = readDocType; /***/ }), -/***/ 51322: +/***/ 86993: /***/ ((__unused_webpack_module, exports) => { -"use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromStatic = void 0; -const fromStatic = (staticValue) => () => Promise.resolve(staticValue); -exports.fromStatic = fromStatic; +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; /***/ }), -/***/ 79721: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 25832: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(63936), exports); -tslib_1.__exportStar(__nccwpck_require__(23324), exports); -tslib_1.__exportStar(__nccwpck_require__(50429), exports); -tslib_1.__exportStar(__nccwpck_require__(45079), exports); -tslib_1.__exportStar(__nccwpck_require__(51322), exports); -tslib_1.__exportStar(__nccwpck_require__(49762), exports); - +///@ts-check -/***/ }), +const util = __nccwpck_require__(38280); +const xmlNode = __nccwpck_require__(7462); +const readDocType = __nccwpck_require__(6072); +const toNumber = __nccwpck_require__(14526); -/***/ 49762: -/***/ ((__unused_webpack_module, exports) => { +const regx = + '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' + .replace(/NAME/g, util.nameRegexp); -"use strict"; +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.memoize = void 0; -const memoize = (provider, isExpired, requiresRefresh) => { - let resolved; - let pending; - let hasResult; - let isConstant = false; - const coalesceProvider = async () => { - if (!pending) { - pending = provider(); - } - try { - resolved = await pending; - hasResult = true; - isConstant = false; - } - finally { - pending = undefined; - } - return resolved; +class OrderedObjParser{ + 
constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, }; - if (isExpired === undefined) { - return async (options) => { - if (!hasResult || (options === null || options === void 0 ? void 0 : options.forceRefresh)) { - resolved = await coalesceProvider(); - } - return resolved; - }; - } - return async (options) => { - if (!hasResult || (options === null || options === void 0 ? void 0 : options.forceRefresh)) { - resolved = await coalesceProvider(); - } - if (isConstant) { - return resolved; - } - if (requiresRefresh && !requiresRefresh(resolved)) { - isConstant = true; - return resolved; - } - if (isExpired(resolved)) { - await coalesceProvider(); - return resolved; - } - return resolved; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, }; -}; -exports.memoize = memoize; - - -/***/ }), - -/***/ 89179: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } -"use strict"; +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Field = void 0; -const types_1 = __nccwpck_require__(55756); -class Field { - constructor({ name, kind = types_1.FieldPosition.HEADER, values = [] }) { - this.name = name; - this.kind = kind; - this.values = values; - } - add(value) { - this.values.push(value); +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] } - set(values) { - this.values = values; + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); } - remove(value) { - this.values = this.values.filter((v) => v !== value); + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + 
const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } } - toString() { - return this.values.map((v) => (v.includes(",") || v.includes(" ") ? `"${v}"` : v)).join(", "); + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? '/' : ''; + if (tags[0] === 'xmlns') { + return ''; } - get() { - return this.values; + if (tags.length === 2) { + tagname = prefix + tags[1]; } + } + return tagname; } -exports.Field = Field; - -/***/ }), - -/***/ 99242: -/***/ ((__unused_webpack_module, exports) => { +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); -"use strict"; +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Fields = void 0; -class Fields { - constructor({ fields = [], encoding = "utf-8" }) { - this.entries = {}; - fields.forEach(this.setField.bind(this)); - this.encoding = encoding; - } - setField(field) { - this.entries[field.name.toLowerCase()] = field; - } - getField(name) { - return this.entries[name.toLowerCase()]; + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } } - removeField(name) { - delete this.entries[name.toLowerCase()]; + if (!Object.keys(attrs).length) { + return; } - getByType(kind) { - return Object.values(this.entries).filter((field) => field.kind === kind); + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; } + return attrs + } } -exports.Fields = Fields; +const parseXml = function(xmlData) { + xmlData = 
xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); -/***/ }), + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } -/***/ 22474: -/***/ ((__unused_webpack_module, exports) => { + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } -"use strict"; + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveHttpHandlerRuntimeConfig = exports.getHttpHandlerExtensionConfiguration = void 0; -const getHttpHandlerExtensionConfiguration = (runtimeConfig) => { - let httpHandler = runtimeConfig.httpHandler; - return { - setHttpHandler(handler) { - httpHandler = handler; - }, - httpHandler() { - return httpHandler; - }, - updateHttpClientConfig(key, value) { - httpHandler.updateHttpClientConfig(key, value); - }, - httpHandlerConfigs() { - return httpHandler.httpHandlerConfigs(); - }, - }; -}; -exports.getHttpHandlerExtensionConfiguration = getHttpHandlerExtensionConfiguration; -const resolveHttpHandlerRuntimeConfig = (httpHandlerExtensionConfiguration) => { - return { - httpHandler: httpHandlerExtensionConfiguration.httpHandler(), - }; -}; -exports.resolveHttpHandlerRuntimeConfig = resolveHttpHandlerRuntimeConfig; + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { -/***/ }), + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); -/***/ 91654: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ -"use strict"; + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(22474), exports); + } -/***/ }), + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = 
findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); -/***/ 63206: -/***/ ((__unused_webpack_module, exports) => { + textData = this.saveTextToParentTag(textData, currentNode, jPath); -"use strict"; + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); -Object.defineProperty(exports, "__esModule", ({ value: true })); + textData = this.saveTextToParentTag(textData, currentNode, jPath); + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + // let val = this.parseTextData(tagExp, this.options.cdataPropName, jPath + "." + this.options.cdataPropName, true, false, true); + // if(!val) val = ""; + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true); + if(val == undefined) val = ""; + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; -/***/ }), + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } -/***/ 38746: -/***/ ((__unused_webpack_module, exports) => { + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { //TODO: namespace + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, tagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${tagName}`); + i = result.i; + tagContent = result.tagContent; + } -"use strict"; + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpRequest = void 0; -class HttpRequest { - constructor(options) { - this.method = options.method || "GET"; - this.hostname = options.hostname || "localhost"; - this.port = options.port; - this.query = options.query || {}; - this.headers = options.headers || {}; - this.body = options.body; - this.protocol = options.protocol - ? options.protocol.slice(-1) !== ":" - ? `${options.protocol}:` - : options.protocol - : "https:"; - this.path = options.path ? (options.path.charAt(0) !== "/" ? `/${options.path}` : options.path) : "/"; - this.username = options.username; - this.password = options.password; - this.fragment = options.fragment; - } - static isInstance(request) { - if (!request) - return false; - const req = request; - return ("method" in req && - "protocol" in req && - "hostname" in req && - "path" in req && - typeof req["query"] === "object" && - typeof req["headers"] === "object"); - } - clone() { - const cloned = new HttpRequest({ - ...this, - headers: { ...this.headers }, - }); - if (cloned.query) - cloned.query = cloneQuery(cloned.query); - return cloned; + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; } + } + return xmlObj.child; } -exports.HttpRequest = HttpRequest; -function cloneQuery(query) { - return Object.keys(query).reduce((carry, paramName) => { - const param = query[paramName]; - return { - ...carry, - [paramName]: Array.isArray(param) ? 
[...param] : param, - }; - }, {}); -} - - -/***/ }), -/***/ 26322: -/***/ ((__unused_webpack_module, exports) => { +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} -"use strict"; +const replaceEntitiesValue = function(val){ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpResponse = void 0; -class HttpResponse { - constructor(options) { - this.statusCode = options.statusCode; - this.reason = options.reason; - this.headers = options.headers || {}; - this.body = options.body; + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); } - static isInstance(response) { - if (!response) - return false; - const resp = response; - return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; } -exports.HttpResponse = HttpResponse; - - -/***/ }), - -/***/ 64418: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); -"use strict"; + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(91654), exports); -tslib_1.__exportStar(__nccwpck_require__(89179), exports); -tslib_1.__exportStar(__nccwpck_require__(99242), exports); -tslib_1.__exportStar(__nccwpck_require__(63206), exports); -tslib_1.__exportStar(__nccwpck_require__(38746), exports); -tslib_1.__exportStar(__nccwpck_require__(26322), exports); -tslib_1.__exportStar(__nccwpck_require__(61466), exports); -tslib_1.__exportStar(__nccwpck_require__(19135), exports); +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." 
+ currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} -/***/ }), +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} -/***/ 61466: -/***/ ((__unused_webpack_module, exports) => { +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substr(0, separatorIndex).replace(/\s\s*$/, ''); + tagExp = tagExp.substr(separatorIndex + 1); + } -"use strict"; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isValidHostname = void 0; -function isValidHostname(hostname) { - const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; - return hostPattern.test(hostname); + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + } } -exports.isValidHostname = isValidHostname; +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } 
else { + const tagData = readTagExp(xmlData, i, '>') -/***/ }), + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} -/***/ 19135: -/***/ ((__unused_webpack_module, exports) => { +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} -"use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); +module.exports = OrderedObjParser; /***/ }), -/***/ 4769: -/***/ ((__unused_webpack_module, exports) => { +/***/ 42380: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +const { buildOptions} = __nccwpck_require__(86993); +const OrderedObjParser = __nccwpck_require__(25832); +const { prettify} = __nccwpck_require__(42882); +const validator = __nccwpck_require__(61739); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseQueryString = void 0; -function parseQueryString(querystring) { - const query = {}; - querystring = querystring.replace(/^\?/, ""); - if (querystring) { - for (const pair of querystring.split("&")) { - let [key, value = null] = pair.split("="); - key = decodeURIComponent(key); - if (value) { - value = decodeURIComponent(value); - } - if (!(key in query)) { - query[key] = value; - } - else if (Array.isArray(query[key])) { - query[key].push(value); - } - else { - query[key] = [query[key], value]; +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; } } - return query; } -exports.parseQueryString = parseQueryString; +module.exports = XMLParser; /***/ }), -/***/ 68415: +/***/ 42882: /***/ ((__unused_webpack_module, exports) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NODEJS_TIMEOUT_ERROR_CODES = exports.TRANSIENT_ERROR_STATUS_CODES = exports.TRANSIENT_ERROR_CODES = exports.THROTTLING_ERROR_CODES = exports.CLOCK_SKEW_ERROR_CODES = void 0; -exports.CLOCK_SKEW_ERROR_CODES = [ - "AuthFailure", - "InvalidSignatureException", - "RequestExpired", - "RequestInTheFuture", - "RequestTimeTooSkewed", - "SignatureDoesNotMatch", -]; -exports.THROTTLING_ERROR_CODES = [ - "BandwidthLimitExceeded", - "EC2ThrottledException", - "LimitExceededException", - "PriorRequestNotComplete", - "ProvisionedThroughputExceededException", - "RequestLimitExceeded", - "RequestThrottled", - "RequestThrottledException", - "SlowDown", - "ThrottledException", - "Throttling", - "ThrottlingException", - "TooManyRequestsException", - "TransactionInProgressException", -]; -exports.TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; -exports.TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; -exports.NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} -/***/ }), +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; -/***/ 6375: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); -"use strict"; + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isServerError = exports.isTransientError = exports.isThrottlingError = exports.isClockSkewError = exports.isRetryableByTrait = void 0; -const constants_1 = __nccwpck_require__(68415); -const isRetryableByTrait = (error) => error.$retryable !== undefined; -exports.isRetryableByTrait = isRetryableByTrait; -const isClockSkewError = (error) => constants_1.CLOCK_SKEW_ERROR_CODES.includes(error.name); -exports.isClockSkewError = isClockSkewError; -const isThrottlingError = (error) => { - var _a, _b; - return ((_a = error.$metadata) === null || _a === void 0 ? 
void 0 : _a.httpStatusCode) === 429 || - constants_1.THROTTLING_ERROR_CODES.includes(error.name) || - ((_b = error.$retryable) === null || _b === void 0 ? void 0 : _b.throttling) == true; -}; -exports.isThrottlingError = isThrottlingError; -const isTransientError = (error) => { - var _a; - return constants_1.TRANSIENT_ERROR_CODES.includes(error.name) || - constants_1.NODEJS_TIMEOUT_ERROR_CODES.includes((error === null || error === void 0 ? void 0 : error.code) || "") || - constants_1.TRANSIENT_ERROR_STATUS_CODES.includes(((_a = error.$metadata) === null || _a === void 0 ? void 0 : _a.httpStatusCode) || 0); -}; -exports.isTransientError = isTransientError; -const isServerError = (error) => { - var _a; - if (((_a = error.$metadata) === null || _a === void 0 ? void 0 : _a.httpStatusCode) !== undefined) { - const statusCode = error.$metadata.httpStatusCode; - if (500 <= statusCode && statusCode <= 599 && !(0, exports.isTransientError)(error)) { - return true; + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; } - return false; + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } } - return false; -}; -exports.isServerError = isServerError; - - -/***/ }), - -/***/ 46062: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} -"use strict"; +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getConfigData = void 0; -const types_1 = __nccwpck_require__(55756); -const loadSharedConfigFiles_1 = __nccwpck_require__(41879); -const getConfigData = (data) => Object.entries(data) - .filter(([key]) => { - const indexOfSeparator = key.indexOf(loadSharedConfigFiles_1.CONFIG_PREFIX_SEPARATOR); - if (indexOfSeparator === -1) { - return false; +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." + atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } } - return Object.values(types_1.IniSectionType).includes(key.substring(0, indexOfSeparator)); -}) - .reduce((acc, [key, value]) => { - const indexOfSeparator = key.indexOf(loadSharedConfigFiles_1.CONFIG_PREFIX_SEPARATOR); - const updatedKey = key.substring(0, indexOfSeparator) === types_1.IniSectionType.PROFILE ? 
key.substring(indexOfSeparator + 1) : key; - acc[updatedKey] = value; - return acc; -}, { - ...(data.default && { default: data.default }), -}); -exports.getConfigData = getConfigData; - - -/***/ }), - -/***/ 47237: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getConfigFilepath = exports.ENV_CONFIG_PATH = void 0; -const path_1 = __nccwpck_require__(71017); -const getHomeDir_1 = __nccwpck_require__(68340); -exports.ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; -const getConfigFilepath = () => process.env[exports.ENV_CONFIG_PATH] || (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "config"); -exports.getConfigFilepath = getConfigFilepath; - - -/***/ }), + } +} -/***/ 99036: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } -"use strict"; + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getCredentialsFilepath = exports.ENV_CREDENTIALS_PATH = void 0; -const path_1 = __nccwpck_require__(71017); -const getHomeDir_1 = __nccwpck_require__(68340); -exports.ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; -const getCredentialsFilepath = () => process.env[exports.ENV_CREDENTIALS_PATH] || (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "credentials"); -exports.getCredentialsFilepath = getCredentialsFilepath; + return false; +} +exports.prettify = prettify; /***/ }), -/***/ 68340: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7462: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getHomeDir = void 0; -const os_1 = __nccwpck_require__(22037); -const path_1 = __nccwpck_require__(71017); -const homeDirCache = {}; -const getHomeDirCacheKey = () => { - if (process && process.geteuid) { - return `${process.geteuid()}`; + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); } - return "DEFAULT"; -}; -const getHomeDir = () => { - const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${path_1.sep}` } = process.env; - if (HOME) - return HOME; - if (USERPROFILE) - return USERPROFILE; - if (HOMEPATH) - return `${HOMEDRIVE}${HOMEPATH}`; - const homeDirCacheKey = getHomeDirCacheKey(); - if (!homeDirCache[homeDirCacheKey]) - homeDirCache[homeDirCacheKey] = (0, os_1.homedir)(); - return homeDirCache[homeDirCacheKey]; + }; }; -exports.getHomeDir = getHomeDir; +module.exports = XmlNode; + /***/ }), -/***/ 52802: -/***/ ((__unused_webpack_module, exports) => { +/***/ 38699: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getProfileName = 
exports.DEFAULT_PROFILE = exports.ENV_PROFILE = void 0; -exports.ENV_PROFILE = "AWS_PROFILE"; -exports.DEFAULT_PROFILE = "default"; -const getProfileName = (init) => init.profile || process.env[exports.ENV_PROFILE] || exports.DEFAULT_PROFILE; -exports.getProfileName = getProfileName; - -/***/ }), +module.exports = createRBTree -/***/ 24740: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +var RED = 0 +var BLACK = 1 -"use strict"; +function RBNode(color, key, value, left, right, count) { + this._color = color + this.key = key + this.value = value + this.left = left + this.right = right + this._count = count +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSSOTokenFilepath = void 0; -const crypto_1 = __nccwpck_require__(6113); -const path_1 = __nccwpck_require__(71017); -const getHomeDir_1 = __nccwpck_require__(68340); -const getSSOTokenFilepath = (id) => { - const hasher = (0, crypto_1.createHash)("sha1"); - const cacheName = hasher.update(id).digest("hex"); - return (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "sso", "cache", `${cacheName}.json`); -}; -exports.getSSOTokenFilepath = getSSOTokenFilepath; +function cloneNode(node) { + return new RBNode(node._color, node.key, node.value, node.left, node.right, node._count) +} +function repaint(color, node) { + return new RBNode(color, node.key, node.value, node.left, node.right, node._count) +} -/***/ }), +function recount(node) { + node._count = 1 + (node.left ? node.left._count : 0) + (node.right ? node.right._count : 0) +} -/***/ 69678: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function RedBlackTree(compare, root) { + this._compare = compare + this.root = root +} -"use strict"; +var proto = RedBlackTree.prototype -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSSOTokenFromFile = void 0; -const fs_1 = __nccwpck_require__(57147); -const getSSOTokenFilepath_1 = __nccwpck_require__(24740); -const { readFile } = fs_1.promises; -const getSSOTokenFromFile = async (id) => { - const ssoTokenFilepath = (0, getSSOTokenFilepath_1.getSSOTokenFilepath)(id); - const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); - return JSON.parse(ssoTokenText); -}; -exports.getSSOTokenFromFile = getSSOTokenFromFile; +Object.defineProperty(proto, "keys", { + get: function() { + var result = [] + this.forEach(function(k,v) { + result.push(k) + }) + return result + } +}) +Object.defineProperty(proto, "values", { + get: function() { + var result = [] + this.forEach(function(k,v) { + result.push(v) + }) + return result + } +}) -/***/ }), +//Returns the number of nodes in the tree +Object.defineProperty(proto, "length", { + get: function() { + if(this.root) { + return this.root._count + } + return 0 + } +}) -/***/ 82820: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +//Insert a new item into the tree +proto.insert = function(key, value) { + var cmp = this._compare + //Find point to insert new node at + var n = this.root + var n_stack = [] + var d_stack = [] + while(n) { + var d = cmp(key, n.key) + n_stack.push(n) + d_stack.push(d) + if(d <= 0) { + n = n.left + } else { + n = n.right + } + } + //Rebuild path to leaf node + n_stack.push(new RBNode(RED, key, value, null, null, 1)) + for(var s=n_stack.length-2; s>=0; --s) { + var n = n_stack[s] + if(d_stack[s] <= 0) { + n_stack[s] = new RBNode(n._color, n.key, n.value, n_stack[s+1], n.right, n._count+1) + } else { + n_stack[s] = new RBNode(n._color, n.key, n.value, n.left, 
n_stack[s+1], n._count+1) + } + } + //Rebalance tree using rotations + //console.log("start insert", key, d_stack) + for(var s=n_stack.length-1; s>1; --s) { + var p = n_stack[s-1] + var n = n_stack[s] + if(p._color === BLACK || n._color === BLACK) { + break + } + var pp = n_stack[s-2] + if(pp.left === p) { + if(p.left === n) { + var y = pp.right + if(y && y._color === RED) { + //console.log("LLr") + p._color = BLACK + pp.right = repaint(BLACK, y) + pp._color = RED + s -= 1 + } else { + //console.log("LLb") + pp._color = RED + pp.left = p.right + p._color = BLACK + p.right = pp + n_stack[s-2] = p + n_stack[s-1] = n + recount(pp) + recount(p) + if(s >= 3) { + var ppp = n_stack[s-3] + if(ppp.left === pp) { + ppp.left = p + } else { + ppp.right = p + } + } + break + } + } else { + var y = pp.right + if(y && y._color === RED) { + //console.log("LRr") + p._color = BLACK + pp.right = repaint(BLACK, y) + pp._color = RED + s -= 1 + } else { + //console.log("LRb") + p.right = n.left + pp._color = RED + pp.left = n.right + n._color = BLACK + n.left = p + n.right = pp + n_stack[s-2] = n + n_stack[s-1] = p + recount(pp) + recount(p) + recount(n) + if(s >= 3) { + var ppp = n_stack[s-3] + if(ppp.left === pp) { + ppp.left = n + } else { + ppp.right = n + } + } + break + } + } + } else { + if(p.right === n) { + var y = pp.left + if(y && y._color === RED) { + //console.log("RRr", y.key) + p._color = BLACK + pp.left = repaint(BLACK, y) + pp._color = RED + s -= 1 + } else { + //console.log("RRb") + pp._color = RED + pp.right = p.left + p._color = BLACK + p.left = pp + n_stack[s-2] = p + n_stack[s-1] = n + recount(pp) + recount(p) + if(s >= 3) { + var ppp = n_stack[s-3] + if(ppp.right === pp) { + ppp.right = p + } else { + ppp.left = p + } + } + break + } + } else { + var y = pp.left + if(y && y._color === RED) { + //console.log("RLr") + p._color = BLACK + pp.left = repaint(BLACK, y) + pp._color = RED + s -= 1 + } else { + //console.log("RLb") + p.left = n.right + pp._color = RED + pp.right = n.left + n._color = BLACK + n.right = p + n.left = pp + n_stack[s-2] = n + n_stack[s-1] = p + recount(pp) + recount(p) + recount(n) + if(s >= 3) { + var ppp = n_stack[s-3] + if(ppp.right === pp) { + ppp.right = n + } else { + ppp.left = n + } + } + break + } + } + } + } + //Return new tree + n_stack[0]._color = BLACK + return new RedBlackTree(cmp, n_stack[0]) +} -"use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSsoSessionData = void 0; -const types_1 = __nccwpck_require__(55756); -const loadSharedConfigFiles_1 = __nccwpck_require__(41879); -const getSsoSessionData = (data) => Object.entries(data) - .filter(([key]) => key.startsWith(types_1.IniSectionType.SSO_SESSION + loadSharedConfigFiles_1.CONFIG_PREFIX_SEPARATOR)) - .reduce((acc, [key, value]) => ({ ...acc, [key.split(loadSharedConfigFiles_1.CONFIG_PREFIX_SEPARATOR)[1]]: value }), {}); -exports.getSsoSessionData = getSsoSessionData; +//Visit all nodes inorder +function doVisitFull(visit, node) { + if(node.left) { + var v = doVisitFull(visit, node.left) + if(v) { return v } + } + var v = visit(node.key, node.value) + if(v) { return v } + if(node.right) { + return doVisitFull(visit, node.right) + } +} +//Visit half nodes in order +function doVisitHalf(lo, compare, visit, node) { + var l = compare(lo, node.key) + if(l <= 0) { + if(node.left) { + var v = doVisitHalf(lo, compare, visit, node.left) + if(v) { return v } + } + var v = visit(node.key, node.value) + if(v) { return v } + } + if(node.right) { + return doVisitHalf(lo, 
compare, visit, node.right) + } +} -/***/ }), +//Visit all nodes within a range +function doVisit(lo, hi, compare, visit, node) { + var l = compare(lo, node.key) + var h = compare(hi, node.key) + var v + if(l <= 0) { + if(node.left) { + v = doVisit(lo, hi, compare, visit, node.left) + if(v) { return v } + } + if(h > 0) { + v = visit(node.key, node.value) + if(v) { return v } + } + } + if(h > 0 && node.right) { + return doVisit(lo, hi, compare, visit, node.right) + } +} -/***/ 43507: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; +proto.forEach = function rbTreeForEach(visit, lo, hi) { + if(!this.root) { + return + } + switch(arguments.length) { + case 1: + return doVisitFull(visit, this.root) + break -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(68340), exports); -tslib_1.__exportStar(__nccwpck_require__(52802), exports); -tslib_1.__exportStar(__nccwpck_require__(24740), exports); -tslib_1.__exportStar(__nccwpck_require__(69678), exports); -tslib_1.__exportStar(__nccwpck_require__(41879), exports); -tslib_1.__exportStar(__nccwpck_require__(34649), exports); -tslib_1.__exportStar(__nccwpck_require__(2546), exports); -tslib_1.__exportStar(__nccwpck_require__(63191), exports); + case 2: + return doVisitHalf(lo, this._compare, visit, this.root) + break + case 3: + if(this._compare(lo, hi) >= 0) { + return + } + return doVisit(lo, hi, this._compare, visit, this.root) + break + } +} -/***/ }), +//First item in list +Object.defineProperty(proto, "begin", { + get: function() { + var stack = [] + var n = this.root + while(n) { + stack.push(n) + n = n.left + } + return new RedBlackTreeIterator(this, stack) + } +}) -/***/ 41879: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +//Last item in list +Object.defineProperty(proto, "end", { + get: function() { + var stack = [] + var n = this.root + while(n) { + stack.push(n) + n = n.right + } + return new RedBlackTreeIterator(this, stack) + } +}) -"use strict"; +//Find the ith item in the tree +proto.at = function(idx) { + if(idx < 0) { + return new RedBlackTreeIterator(this, []) + } + var n = this.root + var stack = [] + while(true) { + stack.push(n) + if(n.left) { + if(idx < n.left._count) { + n = n.left + continue + } + idx -= n.left._count + } + if(!idx) { + return new RedBlackTreeIterator(this, stack) + } + idx -= 1 + if(n.right) { + if(idx >= n.right._count) { + break + } + n = n.right + } else { + break + } + } + return new RedBlackTreeIterator(this, []) +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.loadSharedConfigFiles = exports.CONFIG_PREFIX_SEPARATOR = void 0; -const getConfigData_1 = __nccwpck_require__(46062); -const getConfigFilepath_1 = __nccwpck_require__(47237); -const getCredentialsFilepath_1 = __nccwpck_require__(99036); -const parseIni_1 = __nccwpck_require__(54262); -const slurpFile_1 = __nccwpck_require__(19155); -const swallowError = () => ({}); -exports.CONFIG_PREFIX_SEPARATOR = "."; -const loadSharedConfigFiles = async (init = {}) => { - const { filepath = (0, getCredentialsFilepath_1.getCredentialsFilepath)(), configFilepath = (0, getConfigFilepath_1.getConfigFilepath)() } = init; - const parsedFiles = await Promise.all([ - (0, slurpFile_1.slurpFile)(configFilepath, { - ignoreCache: init.ignoreCache, - }) - .then(parseIni_1.parseIni) - .then(getConfigData_1.getConfigData) - .catch(swallowError), - (0, slurpFile_1.slurpFile)(filepath, { - 
ignoreCache: init.ignoreCache, - }) - .then(parseIni_1.parseIni) - .catch(swallowError), - ]); - return { - configFile: parsedFiles[0], - credentialsFile: parsedFiles[1], - }; -}; -exports.loadSharedConfigFiles = loadSharedConfigFiles; +proto.ge = function(key) { + var cmp = this._compare + var n = this.root + var stack = [] + var last_ptr = 0 + while(n) { + var d = cmp(key, n.key) + stack.push(n) + if(d <= 0) { + last_ptr = stack.length + } + if(d <= 0) { + n = n.left + } else { + n = n.right + } + } + stack.length = last_ptr + return new RedBlackTreeIterator(this, stack) +} +proto.gt = function(key) { + var cmp = this._compare + var n = this.root + var stack = [] + var last_ptr = 0 + while(n) { + var d = cmp(key, n.key) + stack.push(n) + if(d < 0) { + last_ptr = stack.length + } + if(d < 0) { + n = n.left + } else { + n = n.right + } + } + stack.length = last_ptr + return new RedBlackTreeIterator(this, stack) +} -/***/ }), +proto.lt = function(key) { + var cmp = this._compare + var n = this.root + var stack = [] + var last_ptr = 0 + while(n) { + var d = cmp(key, n.key) + stack.push(n) + if(d > 0) { + last_ptr = stack.length + } + if(d <= 0) { + n = n.left + } else { + n = n.right + } + } + stack.length = last_ptr + return new RedBlackTreeIterator(this, stack) +} -/***/ 34649: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +proto.le = function(key) { + var cmp = this._compare + var n = this.root + var stack = [] + var last_ptr = 0 + while(n) { + var d = cmp(key, n.key) + stack.push(n) + if(d >= 0) { + last_ptr = stack.length + } + if(d < 0) { + n = n.left + } else { + n = n.right + } + } + stack.length = last_ptr + return new RedBlackTreeIterator(this, stack) +} -"use strict"; +//Finds the item with key if it exists +proto.find = function(key) { + var cmp = this._compare + var n = this.root + var stack = [] + while(n) { + var d = cmp(key, n.key) + stack.push(n) + if(d === 0) { + return new RedBlackTreeIterator(this, stack) + } + if(d <= 0) { + n = n.left + } else { + n = n.right + } + } + return new RedBlackTreeIterator(this, []) +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.loadSsoSessionData = void 0; -const getConfigFilepath_1 = __nccwpck_require__(47237); -const getSsoSessionData_1 = __nccwpck_require__(82820); -const parseIni_1 = __nccwpck_require__(54262); -const slurpFile_1 = __nccwpck_require__(19155); -const swallowError = () => ({}); -const loadSsoSessionData = async (init = {}) => { - var _a; - return (0, slurpFile_1.slurpFile)((_a = init.configFilepath) !== null && _a !== void 0 ? 
_a : (0, getConfigFilepath_1.getConfigFilepath)()) - .then(parseIni_1.parseIni) - .then(getSsoSessionData_1.getSsoSessionData) - .catch(swallowError); -}; -exports.loadSsoSessionData = loadSsoSessionData; +//Removes item with key from tree +proto.remove = function(key) { + var iter = this.find(key) + if(iter) { + return iter.remove() + } + return this +} +//Returns the item at `key` +proto.get = function(key) { + var cmp = this._compare + var n = this.root + while(n) { + var d = cmp(key, n.key) + if(d === 0) { + return n.value + } + if(d <= 0) { + n = n.left + } else { + n = n.right + } + } + return +} -/***/ }), +//Iterator for red black tree +function RedBlackTreeIterator(tree, stack) { + this.tree = tree + this._stack = stack +} -/***/ 19447: -/***/ ((__unused_webpack_module, exports) => { +var iproto = RedBlackTreeIterator.prototype -"use strict"; +//Test if iterator is valid +Object.defineProperty(iproto, "valid", { + get: function() { + return this._stack.length > 0 + } +}) -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.mergeConfigFiles = void 0; -const mergeConfigFiles = (...files) => { - const merged = {}; - for (const file of files) { - for (const [key, values] of Object.entries(file)) { - if (merged[key] !== undefined) { - Object.assign(merged[key], values); - } - else { - merged[key] = values; - } - } +//Node of the iterator +Object.defineProperty(iproto, "node", { + get: function() { + if(this._stack.length > 0) { + return this._stack[this._stack.length-1] } - return merged; -}; -exports.mergeConfigFiles = mergeConfigFiles; - - -/***/ }), + return null + }, + enumerable: true +}) -/***/ 54262: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +//Makes a copy of an iterator +iproto.clone = function() { + return new RedBlackTreeIterator(this.tree, this._stack.slice()) +} -"use strict"; +//Swaps two nodes +function swapNode(n, v) { + n.key = v.key + n.value = v.value + n.left = v.left + n.right = v.right + n._color = v._color + n._count = v._count +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseIni = void 0; -const types_1 = __nccwpck_require__(55756); -const loadSharedConfigFiles_1 = __nccwpck_require__(41879); -const prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; -const profileNameBlockList = ["__proto__", "profile __proto__"]; -const parseIni = (iniData) => { - const map = {}; - let currentSection; - let currentSubSection; - for (const iniLine of iniData.split(/\r?\n/)) { - const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); - const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; - if (isSection) { - currentSection = undefined; - currentSubSection = undefined; - const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); - const matches = prefixKeyRegex.exec(sectionName); - if (matches) { - const [, prefix, , name] = matches; - if (Object.values(types_1.IniSectionType).includes(prefix)) { - currentSection = [prefix, name].join(loadSharedConfigFiles_1.CONFIG_PREFIX_SEPARATOR); - } - } - else { - currentSection = sectionName; - } - if (profileNameBlockList.includes(sectionName)) { - throw new Error(`Found invalid profile name "${sectionName}"`); - } +//Fix up a double black node in a tree +function fixDoubleBlack(stack) { + var n, p, s, z + for(var i=stack.length-1; i>=0; --i) { + n = stack[i] + if(i === 0) { + n._color = BLACK + return + } + //console.log("visit node:", n.key, i, stack[i].key, stack[i-1].key) + p = stack[i-1] + 
if(p.left === n) { + //console.log("left child") + s = p.right + if(s.right && s.right._color === RED) { + //console.log("case 1: right sibling child red") + s = p.right = cloneNode(s) + z = s.right = cloneNode(s.right) + p.right = s.left + s.left = p + s.right = z + s._color = p._color + n._color = BLACK + p._color = BLACK + z._color = BLACK + recount(p) + recount(s) + if(i > 1) { + var pp = stack[i-2] + if(pp.left === p) { + pp.left = s + } else { + pp.right = s + } } - else if (currentSection) { - const indexOfEqualsSign = trimmedLine.indexOf("="); - if (![0, -1].includes(indexOfEqualsSign)) { - const [name, value] = [ - trimmedLine.substring(0, indexOfEqualsSign).trim(), - trimmedLine.substring(indexOfEqualsSign + 1).trim(), - ]; - if (value === "") { - currentSubSection = name; - } - else { - if (currentSubSection && iniLine.trimStart() === iniLine) { - currentSubSection = undefined; - } - map[currentSection] = map[currentSection] || {}; - const key = currentSubSection ? [currentSubSection, name].join(loadSharedConfigFiles_1.CONFIG_PREFIX_SEPARATOR) : name; - map[currentSection][key] = value; - } - } + stack[i-1] = s + return + } else if(s.left && s.left._color === RED) { + //console.log("case 1: left sibling child red") + s = p.right = cloneNode(s) + z = s.left = cloneNode(s.left) + p.right = z.left + s.left = z.right + z.left = p + z.right = s + z._color = p._color + p._color = BLACK + s._color = BLACK + n._color = BLACK + recount(p) + recount(s) + recount(z) + if(i > 1) { + var pp = stack[i-2] + if(pp.left === p) { + pp.left = z + } else { + pp.right = z + } + } + stack[i-1] = z + return + } + if(s._color === BLACK) { + if(p._color === RED) { + //console.log("case 2: black sibling, red parent", p.right.value) + p._color = BLACK + p.right = repaint(RED, s) + return + } else { + //console.log("case 2: black sibling, black parent", p.right.value) + p.right = repaint(RED, s) + continue + } + } else { + //console.log("case 3: red sibling") + s = cloneNode(s) + p.right = s.left + s.left = p + s._color = p._color + p._color = RED + recount(p) + recount(s) + if(i > 1) { + var pp = stack[i-2] + if(pp.left === p) { + pp.left = s + } else { + pp.right = s + } + } + stack[i-1] = s + stack[i] = p + if(i+1 < stack.length) { + stack[i+1] = n + } else { + stack.push(n) + } + i = i+2 + } + } else { + //console.log("right child") + s = p.left + if(s.left && s.left._color === RED) { + //console.log("case 1: left sibling child red", p.value, p._color) + s = p.left = cloneNode(s) + z = s.left = cloneNode(s.left) + p.left = s.right + s.right = p + s.left = z + s._color = p._color + n._color = BLACK + p._color = BLACK + z._color = BLACK + recount(p) + recount(s) + if(i > 1) { + var pp = stack[i-2] + if(pp.right === p) { + pp.right = s + } else { + pp.left = s + } + } + stack[i-1] = s + return + } else if(s.right && s.right._color === RED) { + //console.log("case 1: right sibling child red") + s = p.left = cloneNode(s) + z = s.right = cloneNode(s.right) + p.left = z.right + s.right = z.left + z.right = p + z.left = s + z._color = p._color + p._color = BLACK + s._color = BLACK + n._color = BLACK + recount(p) + recount(s) + recount(z) + if(i > 1) { + var pp = stack[i-2] + if(pp.right === p) { + pp.right = z + } else { + pp.left = z + } + } + stack[i-1] = z + return + } + if(s._color === BLACK) { + if(p._color === RED) { + //console.log("case 2: black sibling, red parent") + p._color = BLACK + p.left = repaint(RED, s) + return + } else { + //console.log("case 2: black sibling, black parent") + p.left = 
repaint(RED, s) + continue + } + } else { + //console.log("case 3: red sibling") + s = cloneNode(s) + p.left = s.right + s.right = p + s._color = p._color + p._color = RED + recount(p) + recount(s) + if(i > 1) { + var pp = stack[i-2] + if(pp.right === p) { + pp.right = s + } else { + pp.left = s + } + } + stack[i-1] = s + stack[i] = p + if(i+1 < stack.length) { + stack[i+1] = n + } else { + stack.push(n) } + i = i+2 + } } - return map; -}; -exports.parseIni = parseIni; + } +} +//Removes item at iterator from tree +iproto.remove = function() { + var stack = this._stack + if(stack.length === 0) { + return this.tree + } + //First copy path to node + var cstack = new Array(stack.length) + var n = stack[stack.length-1] + cstack[cstack.length-1] = new RBNode(n._color, n.key, n.value, n.left, n.right, n._count) + for(var i=stack.length-2; i>=0; --i) { + var n = stack[i] + if(n.left === stack[i+1]) { + cstack[i] = new RBNode(n._color, n.key, n.value, cstack[i+1], n.right, n._count) + } else { + cstack[i] = new RBNode(n._color, n.key, n.value, n.left, cstack[i+1], n._count) + } + } -/***/ }), + //Get node + n = cstack[cstack.length-1] + //console.log("start remove: ", n.value) -/***/ 2546: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + //If not leaf, then swap with previous node + if(n.left && n.right) { + //console.log("moving to leaf") -"use strict"; + //First walk to previous leaf + var split = cstack.length + n = n.left + while(n.right) { + cstack.push(n) + n = n.right + } + //Copy path to leaf + var v = cstack[split-1] + cstack.push(new RBNode(n._color, v.key, v.value, n.left, n.right, n._count)) + cstack[split-1].key = n.key + cstack[split-1].value = n.value -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseKnownFiles = void 0; -const loadSharedConfigFiles_1 = __nccwpck_require__(41879); -const mergeConfigFiles_1 = __nccwpck_require__(19447); -const parseKnownFiles = async (init) => { - const parsedFiles = await (0, loadSharedConfigFiles_1.loadSharedConfigFiles)(init); - return (0, mergeConfigFiles_1.mergeConfigFiles)(parsedFiles.configFile, parsedFiles.credentialsFile); -}; -exports.parseKnownFiles = parseKnownFiles; + //Fix up stack + for(var i=cstack.length-2; i>=split; --i) { + n = cstack[i] + cstack[i] = new RBNode(n._color, n.key, n.value, n.left, cstack[i+1], n._count) + } + cstack[split-1].left = cstack[split] + } + //console.log("stack=", cstack.map(function(v) { return v.value })) + //Remove leaf node + n = cstack[cstack.length-1] + if(n._color === RED) { + //Easy case: removing red leaf + //console.log("RED leaf") + var p = cstack[cstack.length-2] + if(p.left === n) { + p.left = null + } else if(p.right === n) { + p.right = null + } + cstack.pop() + for(var i=0; i 0) { + return this._stack[this._stack.length-1].key + } + return + }, + enumerable: true +}) -/***/ 19155: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +//Returns value +Object.defineProperty(iproto, "value", { + get: function() { + if(this._stack.length > 0) { + return this._stack[this._stack.length-1].value + } + return + }, + enumerable: true +}) -"use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.slurpFile = void 0; -const fs_1 = __nccwpck_require__(57147); -const { readFile } = fs_1.promises; -const filePromisesHash = {}; -const slurpFile = (path, options) => { - if (!filePromisesHash[path] || (options === null || options === void 0 ? 
void 0 : options.ignoreCache)) { - filePromisesHash[path] = readFile(path, "utf8"); +//Returns the position of this iterator in the sorted list +Object.defineProperty(iproto, "index", { + get: function() { + var idx = 0 + var stack = this._stack + if(stack.length === 0) { + var r = this.tree.root + if(r) { + return r._count + } + return 0 + } else if(stack[stack.length-1].left) { + idx = stack[stack.length-1].left._count } - return filePromisesHash[path]; -}; -exports.slurpFile = slurpFile; + for(var s=stack.length-2; s>=0; --s) { + if(stack[s+1] === stack[s].right) { + ++idx + if(stack[s].left) { + idx += stack[s].left._count + } + } + } + return idx + }, + enumerable: true +}) +//Advances iterator to next element in list +iproto.next = function() { + var stack = this._stack + if(stack.length === 0) { + return + } + var n = stack[stack.length-1] + if(n.right) { + n = n.right + while(n) { + stack.push(n) + n = n.left + } + } else { + stack.pop() + while(stack.length > 0 && stack[stack.length-1].right === n) { + n = stack[stack.length-1] + stack.pop() + } + } +} -/***/ }), +//Checks if iterator is at end of tree +Object.defineProperty(iproto, "hasNext", { + get: function() { + var stack = this._stack + if(stack.length === 0) { + return false + } + if(stack[stack.length-1].right) { + return true + } + for(var s=stack.length-1; s>0; --s) { + if(stack[s-1].left === stack[s]) { + return true + } + } + return false + } +}) -/***/ 63191: -/***/ ((__unused_webpack_module, exports) => { +//Update value +iproto.update = function(value) { + var stack = this._stack + if(stack.length === 0) { + throw new Error("Can't update empty node!") + } + var cstack = new Array(stack.length) + var n = stack[stack.length-1] + cstack[cstack.length-1] = new RBNode(n._color, n.key, value, n.left, n.right, n._count) + for(var i=stack.length-2; i>=0; --i) { + n = stack[i] + if(n.left === stack[i+1]) { + cstack[i] = new RBNode(n._color, n.key, n.value, cstack[i+1], n.right, n._count) + } else { + cstack[i] = new RBNode(n._color, n.key, n.value, n.left, cstack[i+1], n._count) + } + } + return new RedBlackTree(this.tree._compare, cstack[0]) +} -"use strict"; +//Moves iterator backward one element +iproto.prev = function() { + var stack = this._stack + if(stack.length === 0) { + return + } + var n = stack[stack.length-1] + if(n.left) { + n = n.left + while(n) { + stack.push(n) + n = n.right + } + } else { + stack.pop() + while(stack.length > 0 && stack[stack.length-1].left === n) { + n = stack[stack.length-1] + stack.pop() + } + } +} -Object.defineProperty(exports, "__esModule", ({ value: true })); +//Checks if iterator is at start of tree +Object.defineProperty(iproto, "hasPrev", { + get: function() { + var stack = this._stack + if(stack.length === 0) { + return false + } + if(stack[stack.length-1].left) { + return true + } + for(var s=stack.length-1; s>0; --s) { + if(stack[s-1].right === stack[s]) { + return true + } + } + return false + } +}) + +//Default comparison function +function defaultCompare(a, b) { + if(a < b) { + return -1 + } + if(a > b) { + return 1 + } + return 0 +} +//Build a tree +function createRBTree(compare) { + return new RedBlackTree(compare || defaultCompare, null) +} /***/ }), -/***/ 39733: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 66129: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in 
compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SignatureV4 = void 0; -const eventstream_codec_1 = __nccwpck_require__(56459); -const util_hex_encoding_1 = __nccwpck_require__(45364); -const util_middleware_1 = __nccwpck_require__(2390); -const util_utf8_1 = __nccwpck_require__(41895); -const constants_1 = __nccwpck_require__(48644); -const credentialDerivation_1 = __nccwpck_require__(19623); -const getCanonicalHeaders_1 = __nccwpck_require__(51393); -const getCanonicalQuery_1 = __nccwpck_require__(33243); -const getPayloadHash_1 = __nccwpck_require__(48545); -const headerUtil_1 = __nccwpck_require__(62179); -const moveHeadersToQuery_1 = __nccwpck_require__(49828); -const prepareRequest_1 = __nccwpck_require__(60075); -const utilDate_1 = __nccwpck_require__(39299); -class SignatureV4 { - constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { - this.headerMarshaller = new eventstream_codec_1.HeaderMarshaller(util_utf8_1.toUtf8, util_utf8_1.fromUtf8); - this.service = service; - this.sha256 = sha256; - this.uriEscapePath = uriEscapePath; - this.applyChecksum = typeof applyChecksum === "boolean" ? applyChecksum : true; - this.regionProvider = (0, util_middleware_1.normalizeProvider)(region); - this.credentialProvider = (0, util_middleware_1.normalizeProvider)(credentials); - } - async presign(originalRequest, options = {}) { - const { signingDate = new Date(), expiresIn = 3600, unsignableHeaders, unhoistableHeaders, signableHeaders, signingRegion, signingService, } = options; - const credentials = await this.credentialProvider(); - this.validateResolvedCredentials(credentials); - const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); - const { longDate, shortDate } = formatDate(signingDate); - if (expiresIn > constants_1.MAX_PRESIGNED_TTL) { - return Promise.reject("Signature version 4 presigned URLs" + " must have an expiration date less than one week in" + " the future"); - } - const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? signingService : this.service); - const request = (0, moveHeadersToQuery_1.moveHeadersToQuery)((0, prepareRequest_1.prepareRequest)(originalRequest), { unhoistableHeaders }); - if (credentials.sessionToken) { - request.query[constants_1.TOKEN_QUERY_PARAM] = credentials.sessionToken; +exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; +exports.defaultErrorRedactor = defaultErrorRedactor; +const url_1 = __nccwpck_require__(57310); +const util_1 = __nccwpck_require__(21980); +const extend_1 = __importDefault(__nccwpck_require__(38171)); +/** + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. 
+ * + * @see {@link GaxiosError[Symbol.hasInstance]} + */ +exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { + if (instance && + typeof instance === 'object' && + exports.GAXIOS_ERROR_SYMBOL in instance && + instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { + return true; } - request.query[constants_1.ALGORITHM_QUERY_PARAM] = constants_1.ALGORITHM_IDENTIFIER; - request.query[constants_1.CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; - request.query[constants_1.AMZ_DATE_QUERY_PARAM] = longDate; - request.query[constants_1.EXPIRES_QUERY_PARAM] = expiresIn.toString(10); - const canonicalHeaders = (0, getCanonicalHeaders_1.getCanonicalHeaders)(request, unsignableHeaders, signableHeaders); - request.query[constants_1.SIGNED_HEADERS_QUERY_PARAM] = getCanonicalHeaderList(canonicalHeaders); - request.query[constants_1.SIGNATURE_QUERY_PARAM] = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, await (0, getPayloadHash_1.getPayloadHash)(originalRequest, this.sha256))); - return request; + // fallback to native + return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); } - async sign(toSign, options) { - if (typeof toSign === "string") { - return this.signString(toSign, options); - } - else if (toSign.headers && toSign.payload) { - return this.signEvent(toSign, options); + constructor(message, config, response, error) { + var _b; + super(message); + this.config = config; + this.response = response; + this.error = error; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + this[_a] = util_1.pkg.version; + // deep-copy config as we do not want to mutate + // the existing config for future retries/use + this.config = (0, extend_1.default)(true, {}, config); + if (this.response) { + this.response.config = (0, extend_1.default)(true, {}, this.response.config); + } + if (this.response) { + try { + this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); + } + catch (_c) { + // best effort - don't throw an error within an error + // we could set `this.response.config.responseType = 'unknown'`, but + // that would mutate future calls with this config object. 
+ } + this.status = this.response.status; } - else if (toSign.message) { - return this.signMessage(toSign, options); + if (error && 'code' in error && error.code) { + this.code = error.code; } - else { - return this.signRequest(toSign, options); + if (config.errorRedactor) { + config.errorRedactor({ + config: this.config, + response: this.response, + }); } } - async signEvent({ headers, payload }, { signingDate = new Date(), priorSignature, signingRegion, signingService }) { - const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); - const { shortDate, longDate } = formatDate(signingDate); - const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? signingService : this.service); - const hashedPayload = await (0, getPayloadHash_1.getPayloadHash)({ headers: {}, body: payload }, this.sha256); - const hash = new this.sha256(); - hash.update(headers); - const hashedHeaders = (0, util_hex_encoding_1.toHex)(await hash.digest()); - const stringToSign = [ - constants_1.EVENT_ALGORITHM_IDENTIFIER, - longDate, - scope, - priorSignature, - hashedHeaders, - hashedPayload, - ].join("\n"); - return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); - } - async signMessage(signableMessage, { signingDate = new Date(), signingRegion, signingService }) { - const promise = this.signEvent({ - headers: this.headerMarshaller.format(signableMessage.message.headers), - payload: signableMessage.message.body, - }, { - signingDate, - signingRegion, - signingService, - priorSignature: signableMessage.priorSignature, - }); - return promise.then((signature) => { - return { message: signableMessage.message, signature }; - }); - } - async signString(stringToSign, { signingDate = new Date(), signingRegion, signingService } = {}) { - const credentials = await this.credentialProvider(); - this.validateResolvedCredentials(credentials); - const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); - const { shortDate } = formatDate(signingDate); - const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); - hash.update((0, util_utf8_1.toUint8Array)(stringToSign)); - return (0, util_hex_encoding_1.toHex)(await hash.digest()); +} +exports.GaxiosError = GaxiosError; +function translateData(responseType, data) { + switch (responseType) { + case 'stream': + return data; + case 'json': + return JSON.parse(JSON.stringify(data)); + case 'arraybuffer': + return JSON.parse(Buffer.from(data).toString('utf8')); + case 'blob': + return JSON.parse(data.text()); + default: + return data; } - async signRequest(requestToSign, { signingDate = new Date(), signableHeaders, unsignableHeaders, signingRegion, signingService, } = {}) { - const credentials = await this.credentialProvider(); - this.validateResolvedCredentials(credentials); - const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); - const request = (0, prepareRequest_1.prepareRequest)(requestToSign); - const { longDate, shortDate } = formatDate(signingDate); - const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? 
signingService : this.service); - request.headers[constants_1.AMZ_DATE_HEADER] = longDate; - if (credentials.sessionToken) { - request.headers[constants_1.TOKEN_HEADER] = credentials.sessionToken; - } - const payloadHash = await (0, getPayloadHash_1.getPayloadHash)(request, this.sha256); - if (!(0, headerUtil_1.hasHeader)(constants_1.SHA256_HEADER, request.headers) && this.applyChecksum) { - request.headers[constants_1.SHA256_HEADER] = payloadHash; +} +/** + * An experimental error redactor. + * + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental + */ +function defaultErrorRedactor(data) { + const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; + function redactHeaders(headers) { + if (!headers) + return; + for (const key of Object.keys(headers)) { + // any casing of `Authentication` + if (/^authentication$/i.test(key)) { + headers[key] = REDACT; + } + // any casing of `Authorization` + if (/^authorization$/i.test(key)) { + headers[key] = REDACT; + } + // anything containing secret, such as 'client secret' + if (/secret/i.test(key)) { + headers[key] = REDACT; + } } - const canonicalHeaders = (0, getCanonicalHeaders_1.getCanonicalHeaders)(request, unsignableHeaders, signableHeaders); - const signature = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, payloadHash)); - request.headers[constants_1.AUTH_HEADER] = - `${constants_1.ALGORITHM_IDENTIFIER} ` + - `Credential=${credentials.accessKeyId}/${scope}, ` + - `SignedHeaders=${getCanonicalHeaderList(canonicalHeaders)}, ` + - `Signature=${signature}`; - return request; - } - createCanonicalRequest(request, canonicalHeaders, payloadHash) { - const sortedHeaders = Object.keys(canonicalHeaders).sort(); - return `${request.method} -${this.getCanonicalPath(request)} -${(0, getCanonicalQuery_1.getCanonicalQuery)(request)} -${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} - -${sortedHeaders.join(";")} -${payloadHash}`; - } - async createStringToSign(longDate, credentialScope, canonicalRequest) { - const hash = new this.sha256(); - hash.update((0, util_utf8_1.toUint8Array)(canonicalRequest)); - const hashedRequest = await hash.digest(); - return `${constants_1.ALGORITHM_IDENTIFIER} -${longDate} -${credentialScope} -${(0, util_hex_encoding_1.toHex)(hashedRequest)}`; } - getCanonicalPath({ path }) { - if (this.uriEscapePath) { - const normalizedPathSegments = []; - for (const pathSegment of path.split("/")) { - if ((pathSegment === null || pathSegment === void 0 ? void 0 : pathSegment.length) === 0) - continue; - if (pathSegment === ".") - continue; - if (pathSegment === "..") { - normalizedPathSegments.pop(); - } - else { - normalizedPathSegments.push(pathSegment); - } + function redactString(obj, key) { + if (typeof obj === 'object' && + obj !== null && + typeof obj[key] === 'string') { + const text = obj[key]; + if (/grant_type=/i.test(text) || + /assertion=/i.test(text) || + /secret/i.test(text)) { + obj[key] = REDACT; } - const normalizedPath = `${(path === null || path === void 0 ? void 0 : path.startsWith("/")) ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && (path === null || path === void 0 ? void 0 : path.endsWith("/")) ? 
"/" : ""}`; - const doubleEncoded = encodeURIComponent(normalizedPath); - return doubleEncoded.replace(/%2F/g, "/"); } - return path; - } - async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { - const stringToSign = await this.createStringToSign(longDate, credentialScope, canonicalRequest); - const hash = new this.sha256(await keyPromise); - hash.update((0, util_utf8_1.toUint8Array)(stringToSign)); - return (0, util_hex_encoding_1.toHex)(await hash.digest()); } - getSigningKey(credentials, region, shortDate, service) { - return (0, credentialDerivation_1.getSigningKey)(this.sha256, credentials, shortDate, region, service || this.service); + function redactObject(obj) { + if (typeof obj === 'object' && obj !== null) { + if ('grant_type' in obj) { + obj['grant_type'] = REDACT; + } + if ('assertion' in obj) { + obj['assertion'] = REDACT; + } + if ('client_secret' in obj) { + obj['client_secret'] = REDACT; + } + } } - validateResolvedCredentials(credentials) { - if (typeof credentials !== "object" || - typeof credentials.accessKeyId !== "string" || - typeof credentials.secretAccessKey !== "string") { - throw new Error("Resolved credential object is not valid"); + if (data.config) { + redactHeaders(data.config.headers); + redactString(data.config, 'data'); + redactObject(data.config.data); + redactString(data.config, 'body'); + redactObject(data.config.body); + try { + const url = new url_1.URL('', data.config.url); + if (url.searchParams.has('token')) { + url.searchParams.set('token', REDACT); + } + if (url.searchParams.has('client_secret')) { + url.searchParams.set('client_secret', REDACT); + } + data.config.url = url.toString(); + } + catch (_b) { + // ignore error - no need to parse an invalid URL } } + if (data.response) { + defaultErrorRedactor({ config: data.response.config }); + redactHeaders(data.response.headers); + redactString(data.response, 'data'); + redactObject(data.response.data); + } + return data; } -exports.SignatureV4 = SignatureV4; -const formatDate = (now) => { - const longDate = (0, utilDate_1.iso8601)(now).replace(/[\-:]/g, ""); - return { - longDate, - shortDate: longDate.slice(0, 8), - }; -}; -const getCanonicalHeaderList = (headers) => Object.keys(headers).sort().join(";"); - +//# sourceMappingURL=common.js.map /***/ }), -/***/ 69098: -/***/ ((__unused_webpack_module, exports) => { +/***/ 28133: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.cloneQuery = exports.cloneRequest = void 0; -const cloneRequest = ({ headers, query, ...rest }) => ({ - ...rest, - headers: { ...headers }, - query: query ? (0, exports.cloneQuery)(query) : undefined, +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; }); -exports.cloneRequest = cloneRequest; -const cloneQuery = (query) => Object.keys(query).reduce((carry, paramName) => { - const param = query[paramName]; - return { - ...carry, - [paramName]: Array.isArray(param) ? [...param] : param, - }; -}, {}); -exports.cloneQuery = cloneQuery; - - -/***/ }), - -/***/ 48644: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MAX_PRESIGNED_TTL = exports.KEY_TYPE_IDENTIFIER = exports.MAX_CACHE_SIZE = exports.UNSIGNED_PAYLOAD = exports.EVENT_ALGORITHM_IDENTIFIER = exports.ALGORITHM_IDENTIFIER_V4A = exports.ALGORITHM_IDENTIFIER = exports.UNSIGNABLE_PATTERNS = exports.SEC_HEADER_PATTERN = exports.PROXY_HEADER_PATTERN = exports.ALWAYS_UNSIGNABLE_HEADERS = exports.HOST_HEADER = exports.TOKEN_HEADER = exports.SHA256_HEADER = exports.SIGNATURE_HEADER = exports.GENERATED_HEADERS = exports.DATE_HEADER = exports.AMZ_DATE_HEADER = exports.AUTH_HEADER = exports.REGION_SET_PARAM = exports.TOKEN_QUERY_PARAM = exports.SIGNATURE_QUERY_PARAM = exports.EXPIRES_QUERY_PARAM = exports.SIGNED_HEADERS_QUERY_PARAM = exports.AMZ_DATE_QUERY_PARAM = exports.CREDENTIAL_QUERY_PARAM = exports.ALGORITHM_QUERY_PARAM = void 0; -exports.ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; -exports.CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; -exports.AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; -exports.SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; -exports.EXPIRES_QUERY_PARAM = "X-Amz-Expires"; -exports.SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; -exports.TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; -exports.REGION_SET_PARAM = "X-Amz-Region-Set"; -exports.AUTH_HEADER = "authorization"; -exports.AMZ_DATE_HEADER = exports.AMZ_DATE_QUERY_PARAM.toLowerCase(); -exports.DATE_HEADER = "date"; -exports.GENERATED_HEADERS = [exports.AUTH_HEADER, exports.AMZ_DATE_HEADER, exports.DATE_HEADER]; -exports.SIGNATURE_HEADER = exports.SIGNATURE_QUERY_PARAM.toLowerCase(); -exports.SHA256_HEADER = "x-amz-content-sha256"; -exports.TOKEN_HEADER = exports.TOKEN_QUERY_PARAM.toLowerCase(); -exports.HOST_HEADER = "host"; -exports.ALWAYS_UNSIGNABLE_HEADERS = { - authorization: true, - "cache-control": true, - connection: true, - expect: true, - from: true, - "keep-alive": true, - "max-forwards": true, - pragma: true, - referer: true, - te: true, - trailer: true, - "transfer-encoding": true, - upgrade: true, - "user-agent": true, - "x-amzn-trace-id": true, +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; }; -exports.PROXY_HEADER_PATTERN = /^proxy-/; -exports.SEC_HEADER_PATTERN = /^sec-/; -exports.UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; -exports.ALGORITHM_IDENTIFIER = 
"AWS4-HMAC-SHA256"; -exports.ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; -exports.EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; -exports.UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; -exports.MAX_CACHE_SIZE = 50; -exports.KEY_TYPE_IDENTIFIER = "aws4_request"; -exports.MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; - - -/***/ }), - -/***/ 19623: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.clearCredentialCache = exports.getSigningKey = exports.createScope = void 0; -const util_hex_encoding_1 = __nccwpck_require__(45364); -const util_utf8_1 = __nccwpck_require__(41895); -const constants_1 = __nccwpck_require__(48644); -const signingKeyCache = {}; -const cacheQueue = []; -const createScope = (shortDate, region, service) => `${shortDate}/${region}/${service}/${constants_1.KEY_TYPE_IDENTIFIER}`; -exports.createScope = createScope; -const getSigningKey = async (sha256Constructor, credentials, shortDate, region, service) => { - const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); - const cacheKey = `${shortDate}:${region}:${service}:${(0, util_hex_encoding_1.toHex)(credsHash)}:${credentials.sessionToken}`; - if (cacheKey in signingKeyCache) { - return signingKeyCache[cacheKey]; - } - cacheQueue.push(cacheKey); - while (cacheQueue.length > constants_1.MAX_CACHE_SIZE) { - delete signingKeyCache[cacheQueue.shift()]; - } - let key = `AWS4${credentials.secretAccessKey}`; - for (const signable of [shortDate, region, service, constants_1.KEY_TYPE_IDENTIFIER]) { - key = await hmac(sha256Constructor, key, signable); - } - return (signingKeyCache[cacheKey] = key); +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; -exports.getSigningKey = getSigningKey; -const clearCredentialCache = () => { - cacheQueue.length = 0; - Object.keys(signingKeyCache).forEach((cacheKey) => { - delete signingKeyCache[cacheKey]; - }); +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; -exports.clearCredentialCache = clearCredentialCache; -const hmac = (ctor, secret, data) => { - const hash = new ctor(secret); - hash.update((0, util_utf8_1.toUint8Array)(data)); - return hash.digest(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; - - -/***/ }), - -/***/ 51393: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - +var _Gaxios_instances, _a, _Gaxios_urlMayUseProxy, _Gaxios_applyRequestInterceptors, _Gaxios_applyResponseInterceptors, _Gaxios_prepareRequest, _Gaxios_proxyAgent, _Gaxios_getProxyAgent; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getCanonicalHeaders = void 0; -const constants_1 = __nccwpck_require__(48644); -const getCanonicalHeaders = ({ headers }, unsignableHeaders, signableHeaders) => { - const canonical = {}; - for (const headerName of Object.keys(headers).sort()) { - if (headers[headerName] == undefined) { - continue; +exports.Gaxios = void 0; +const extend_1 = __importDefault(__nccwpck_require__(38171)); +const https_1 = __nccwpck_require__(95687); +const node_fetch_1 = __importDefault(__nccwpck_require__(80467)); +const querystring_1 = __importDefault(__nccwpck_require__(63477)); +const is_stream_1 = __importDefault(__nccwpck_require__(41554)); +const url_1 = __nccwpck_require__(57310); +const common_1 = __nccwpck_require__(66129); +const retry_1 = __nccwpck_require__(31052); +const stream_1 = __nccwpck_require__(12781); +const uuid_1 = __nccwpck_require__(75840); +const interceptor_1 = __nccwpck_require__(14309); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; } - const canonicalHeaderName = headerName.toLowerCase(); - if (canonicalHeaderName in constants_1.ALWAYS_UNSIGNABLE_HEADERS || - (unsignableHeaders === null || unsignableHeaders === void 0 ? void 0 : unsignableHeaders.has(canonicalHeaderName)) || - constants_1.PROXY_HEADER_PATTERN.test(canonicalHeaderName) || - constants_1.SEC_HEADER_PATTERN.test(canonicalHeaderName)) { - if (!signableHeaders || (signableHeaders && !signableHeaders.has(canonicalHeaderName))) { - continue; + } + return undefined; +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + _Gaxios_instances.add(this); + this.agentCache = new Map(); + this.defaults = defaults || {}; + this.interceptors = { + request: new interceptor_1.GaxiosInterceptorManager(), + response: new interceptor_1.GaxiosInterceptorManager(), + }; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. 
+ */ + async request(opts = {}) { + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_prepareRequest).call(this, opts); + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyRequestInterceptors).call(this, opts); + return __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyResponseInterceptors).call(this, this._request(opts)); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + var _b; + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + if (opts.responseType === 'stream') { + let response = ''; + await new Promise(resolve => { + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('data', chunk => { + response += chunk; + }); + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); + }); + translatedResponse.data = response; + } + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e instanceof common_1.GaxiosError + ? e + : new common_1.GaxiosError(e.message, opts, undefined, e); + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + // The error's config could be redacted - therefore we only want to + // copy the retry state over to the existing config + opts.retryConfig = (_b = err.config) === null || _b === void 0 ? 
void 0 : _b.retryConfig; + return this._request(opts); + } + throw err; } - canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); } - return canonical; -}; -exports.getCanonicalHeaders = getCanonicalHeaders; - - -/***/ }), - -/***/ 33243: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getCanonicalQuery = void 0; -const util_uri_escape_1 = __nccwpck_require__(54197); -const constants_1 = __nccwpck_require__(48644); -const getCanonicalQuery = ({ query = {} }) => { - const keys = []; - const serialized = {}; - for (const key of Object.keys(query).sort()) { - if (key.toLowerCase() === constants_1.SIGNATURE_HEADER) { - continue; + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + case 'text': + return res.text(); + default: + return this.getResponseDataFromContentType(res); } - keys.push(key); - const value = query[key]; - if (typeof value === "string") { - serialized[key] = `${(0, util_uri_escape_1.escapeUri)(key)}=${(0, util_uri_escape_1.escapeUri)(value)}`; + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. + * @returns {Promise} a promise that resolves to the response data. 
+ */ + async getResponseDataFromContentType(response) { + let contentType = response.headers.get('Content-Type'); + if (contentType === null) { + // Maintain existing functionality by calling text() + return response.text(); } - else if (Array.isArray(value)) { - serialized[key] = value - .slice(0) - .reduce((encoded, value) => encoded.concat([`${(0, util_uri_escape_1.escapeUri)(key)}=${(0, util_uri_escape_1.escapeUri)(value)}`]), []) - .sort() - .join("&"); + contentType = contentType.toLowerCase(); + if (contentType.includes('application/json')) { + let data = await response.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + else if (contentType.match(/^text\//)) { + return response.text(); + } + else { + // If the content type is something not easily handled, just return the raw data (blob) + return response.blob(); } } - return keys - .map((key) => serialized[key]) - .filter((serialized) => serialized) - .join("&"); -}; -exports.getCanonicalQuery = getCanonicalQuery; - - -/***/ }), - -/***/ 48545: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getPayloadHash = void 0; -const is_array_buffer_1 = __nccwpck_require__(10780); -const util_hex_encoding_1 = __nccwpck_require__(45364); -const util_utf8_1 = __nccwpck_require__(41895); -const constants_1 = __nccwpck_require__(48644); -const getPayloadHash = async ({ headers, body }, hashConstructor) => { - for (const headerName of Object.keys(headers)) { - if (headerName.toLowerCase() === constants_1.SHA256_HEADER) { - return headers[headerName]; + /** + * Creates an async generator that yields the pieces of a multipart/related request body. + * This implementation follows the spec: https://www.ietf.org/rfc/rfc2387.txt. However, recursive + * multipart/related requests are not currently supported. + * + * @param {GaxioMultipartOptions[]} multipartOptions the pieces to turn into a multipart/related body. + * @param {string} boundary the boundary string to be placed between each part. + */ + async *getMultipartRequest(multipartOptions, boundary) { + const finale = `--${boundary}--`; + for (const currentPart of multipartOptions) { + const partContentType = currentPart.headers['Content-Type'] || 'application/octet-stream'; + const preamble = `--${boundary}\r\nContent-Type: ${partContentType}\r\n\r\n`; + yield preamble; + if (typeof currentPart.content === 'string') { + yield currentPart.content; + } + else { + yield* currentPart.content; + } + yield '\r\n'; } + yield finale; } - if (body == undefined) { - return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; +} +exports.Gaxios = Gaxios; +_a = Gaxios, _Gaxios_instances = new WeakSet(), _Gaxios_urlMayUseProxy = function _Gaxios_urlMayUseProxy(url, noProxy = []) { + var _b, _c; + const candidate = new url_1.URL(url); + const noProxyList = [...noProxy]; + const noProxyEnvList = ((_c = ((_b = process.env.NO_PROXY) !== null && _b !== void 0 ? _b : process.env.no_proxy)) === null || _c === void 0 ? 
void 0 : _c.split(',')) || []; + for (const rule of noProxyEnvList) { + noProxyList.push(rule.trim()); } - else if (typeof body === "string" || ArrayBuffer.isView(body) || (0, is_array_buffer_1.isArrayBuffer)(body)) { - const hashCtor = new hashConstructor(); - hashCtor.update((0, util_utf8_1.toUint8Array)(body)); - return (0, util_hex_encoding_1.toHex)(await hashCtor.digest()); + for (const rule of noProxyList) { + // Match regex + if (rule instanceof RegExp) { + if (rule.test(candidate.toString())) { + return false; + } + } + // Match URL + else if (rule instanceof url_1.URL) { + if (rule.origin === candidate.origin) { + return false; + } + } + // Match string regex + else if (rule.startsWith('*.') || rule.startsWith('.')) { + const cleanedRule = rule.replace(/^\*\./, '.'); + if (candidate.hostname.endsWith(cleanedRule)) { + return false; + } + } + // Basic string match + else if (rule === candidate.origin || + rule === candidate.hostname || + rule === candidate.href) { + return false; + } } - return constants_1.UNSIGNED_PAYLOAD; -}; -exports.getPayloadHash = getPayloadHash; - - -/***/ }), - -/***/ 62179: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.deleteHeader = exports.getHeaderValue = exports.hasHeader = void 0; -const hasHeader = (soughtHeader, headers) => { - soughtHeader = soughtHeader.toLowerCase(); - for (const headerName of Object.keys(headers)) { - if (soughtHeader === headerName.toLowerCase()) { - return true; + return true; +}, _Gaxios_applyRequestInterceptors = +/** + * Applies the request interceptors. The request interceptors are applied after the + * call to prepareRequest is completed. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyRequestInterceptors(options) { + let promiseChain = Promise.resolve(options); + for (const interceptor of this.interceptors.request.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); } } - return false; -}; -exports.hasHeader = hasHeader; -const getHeaderValue = (soughtHeader, headers) => { - soughtHeader = soughtHeader.toLowerCase(); - for (const headerName of Object.keys(headers)) { - if (soughtHeader === headerName.toLowerCase()) { - return headers[headerName]; + return promiseChain; +}, _Gaxios_applyResponseInterceptors = +/** + * Applies the response interceptors. The response interceptors are applied after the + * call to request is made. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyResponseInterceptors(response) { + let promiseChain = Promise.resolve(response); + for (const interceptor of this.interceptors.response.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } + } + return promiseChain; +}, _Gaxios_prepareRequest = +/** + * Validates the options, merges them with defaults, and prepare request. + * + * @param options The original options passed from the client. 
+ * @returns Prepared options, ready to make a request + */ +async function _Gaxios_prepareRequest(options) { + var _b, _c, _d, _e; + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); + } + // baseUrl has been deprecated, remove in 2.0 + const baseUrl = opts.baseUrl || opts.baseURL; + if (baseUrl) { + opts.url = baseUrl.toString() + opts.url; + } + opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; + if (opts.params && Object.keys(opts.params).length > 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.toString().includes('?') ? '&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; + } + if (typeof options.maxContentLength === 'number') { + opts.size = options.maxContentLength; + } + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; + } + opts.headers = opts.headers || {}; + if (opts.multipart === undefined && opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; + } + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + } + else if (typeof opts.data === 'object') { + // If www-form-urlencoded content type has been set, but data is + // provided as an object, serialize the content using querystring: + if (!isFormData) { + if (getHeader(opts, 'content-type') === + 'application/x-www-form-urlencoded') { + opts.body = opts.paramsSerializer(opts.data); + } + else { + // } else if (!(opts.data instanceof FormData)) { + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + opts.body = JSON.stringify(opts.data); + } + } + } + else { + opts.body = opts.data; + } + } + else if (opts.multipart && opts.multipart.length > 0) { + // note: once the minimum version reaches Node 16, + // this can be replaced with randomUUID() function from crypto + // and the dependency on UUID removed + const boundary = (0, uuid_1.v4)(); + opts.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + const bodyStream = new stream_1.PassThrough(); + opts.body = bodyStream; + (0, stream_1.pipeline)(this.getMultipartRequest(opts.multipart, boundary), bodyStream, () => { }); + } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'unknown'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = opts.proxy || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.HTTPS_PROXY) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.https_proxy) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.HTTP_PROXY) || + ((_e = process === null || process === void 0 ? void 0 : process.env) === null || _e === void 0 ? 
void 0 : _e.http_proxy); + const urlMayUseProxy = __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_urlMayUseProxy).call(this, opts.url, opts.noProxy); + if (opts.agent) { + // don't do any of the following options - use the user-provided agent. + } + else if (proxy && urlMayUseProxy) { + const HttpsProxyAgent = await __classPrivateFieldGet(_a, _a, "m", _Gaxios_getProxyAgent).call(_a); + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + opts.agent = new HttpsProxyAgent(proxy, { + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(proxy, opts.agent); } } - return undefined; -}; -exports.getHeaderValue = getHeaderValue; -const deleteHeader = (soughtHeader, headers) => { - soughtHeader = soughtHeader.toLowerCase(); - for (const headerName of Object.keys(headers)) { - if (soughtHeader === headerName.toLowerCase()) { - delete headers[headerName]; + else if (opts.cert && opts.key) { + // Configure client for mTLS + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + if (typeof opts.errorRedactor !== 'function' && + opts.errorRedactor !== false) { + opts.errorRedactor = common_1.defaultErrorRedactor; } + return opts; +}, _Gaxios_getProxyAgent = async function _Gaxios_getProxyAgent() { + __classPrivateFieldSet(this, _a, __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent) || (await Promise.resolve().then(() => __importStar(__nccwpck_require__(74522)))).HttpsProxyAgent, "f", _Gaxios_proxyAgent); + return __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent); }; -exports.deleteHeader = deleteHeader; - - -/***/ }), - -/***/ 11528: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.prepareRequest = exports.moveHeadersToQuery = exports.getPayloadHash = exports.getCanonicalQuery = exports.getCanonicalHeaders = void 0; -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(39733), exports); -var getCanonicalHeaders_1 = __nccwpck_require__(51393); -Object.defineProperty(exports, "getCanonicalHeaders", ({ enumerable: true, get: function () { return getCanonicalHeaders_1.getCanonicalHeaders; } })); -var getCanonicalQuery_1 = __nccwpck_require__(33243); -Object.defineProperty(exports, "getCanonicalQuery", ({ enumerable: true, get: function () { return getCanonicalQuery_1.getCanonicalQuery; } })); -var getPayloadHash_1 = __nccwpck_require__(48545); -Object.defineProperty(exports, "getPayloadHash", ({ enumerable: true, get: function () { return getPayloadHash_1.getPayloadHash; } })); -var moveHeadersToQuery_1 = __nccwpck_require__(49828); -Object.defineProperty(exports, "moveHeadersToQuery", ({ enumerable: true, get: function () { return moveHeadersToQuery_1.moveHeadersToQuery; } })); -var prepareRequest_1 = __nccwpck_require__(60075); -Object.defineProperty(exports, "prepareRequest", ({ enumerable: true, get: function () { return prepareRequest_1.prepareRequest; } })); -tslib_1.__exportStar(__nccwpck_require__(19623), exports); - +/** + * A cache for the lazily-loaded proxy agent. + * + * Should use {@link Gaxios[#getProxyAgent]} to retrieve. 
+ */ +// using `import` to dynamically import the types here +_Gaxios_proxyAgent = { value: void 0 }; +//# sourceMappingURL=gaxios.js.map /***/ }), -/***/ 49828: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 59555: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.moveHeadersToQuery = void 0; -const cloneRequest_1 = __nccwpck_require__(69098); -const moveHeadersToQuery = (request, options = {}) => { - var _a; - const { headers, query = {} } = typeof request.clone === "function" ? request.clone() : (0, cloneRequest_1.cloneRequest)(request); - for (const name of Object.keys(headers)) { - const lname = name.toLowerCase(); - if (lname.slice(0, 6) === "x-amz-" && !((_a = options.unhoistableHeaders) === null || _a === void 0 ? void 0 : _a.has(lname))) { - query[name] = headers[name]; - delete headers[name]; - } +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; } - return { - ...request, - headers, - query, - }; + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; -exports.moveHeadersToQuery = moveHeadersToQuery; - +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +exports.request = request; +const gaxios_1 = __nccwpck_require__(28133); +Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); +var common_1 = __nccwpck_require__(66129); +Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); +__exportStar(__nccwpck_require__(14309), exports); +/** + * The default instance used when the `request` method is directly + * invoked. + */ +exports.instance = new gaxios_1.Gaxios(); +/** + * Make an HTTP request using the given options. + * @param opts Options for the request + */ +async function request(opts) { + return exports.instance.request(opts); +} +//# sourceMappingURL=index.js.map /***/ }), -/***/ 60075: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 14309: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright 2024 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.prepareRequest = void 0; -const cloneRequest_1 = __nccwpck_require__(69098); -const constants_1 = __nccwpck_require__(48644); -const prepareRequest = (request) => { - request = typeof request.clone === "function" ? request.clone() : (0, cloneRequest_1.cloneRequest)(request); - for (const headerName of Object.keys(request.headers)) { - if (constants_1.GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { - delete request.headers[headerName]; - } - } - return request; -}; -exports.prepareRequest = prepareRequest; - +exports.GaxiosInterceptorManager = void 0; +/** + * Class to manage collections of GaxiosInterceptors for both requests and responses. + */ +class GaxiosInterceptorManager extends Set { +} +exports.GaxiosInterceptorManager = GaxiosInterceptorManager; +//# sourceMappingURL=interceptor.js.map /***/ }), -/***/ 39299: +/***/ 31052: /***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toDate = exports.iso8601 = void 0; -const iso8601 = (time) => (0, exports.toDate)(time) - .toISOString() - .replace(/\.\d{3}Z$/, "Z"); -exports.iso8601 = iso8601; -const toDate = (time) => { - if (typeof time === "number") { - return new Date(time * 1000); +exports.getRetryConfig = getRetryConfig; +async function getRetryConfig(err) { + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; + } + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + config.retryDelayMultiplier = config.retryDelayMultiplier + ? config.retryDelayMultiplier + : 2; + config.timeOfFirstRequest = config.timeOfFirstRequest + ? config.timeOfFirstRequest + : Date.now(); + config.totalTimeout = config.totalTimeout + ? config.totalTimeout + : Number.MAX_SAFE_INTEGER; + config.maxRetryDelay = config.maxRetryDelay + ? config.maxRetryDelay + : Number.MAX_SAFE_INTEGER; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. 
+ const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 408 - Retry ("Request Timeout") + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [408, 408], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + const delay = getNextRetryDelay(config); + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); } - if (typeof time === "string") { - if (Number(time)) { - return new Date(Number(time) * 1000); + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; +} +/** + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. + */ +function shouldRetryRequest(err) { + var _a; + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; } - return new Date(time); } - return time; -}; -exports.toDate = toDate; - + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; +} +/** + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. + */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; +} +/** + * Gets the delay to wait before the next retry. + * + * @param {RetryConfig} config The current set of retry options + * @returns {number} the amount of ms to wait before the next retry attempt. 
+ */ +function getNextRetryDelay(config) { + var _a; + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; + // Formula: retryDelay + ((retryDelayMultiplier^currentRetryAttempt - 1 / 2) * 1000) + const calculatedDelay = retryDelay + + ((Math.pow(config.retryDelayMultiplier, config.currentRetryAttempt) - 1) / + 2) * + 1000; + const maxAllowableDelay = config.totalTimeout - (Date.now() - config.timeOfFirstRequest); + return Math.min(calculatedDelay, maxAllowableDelay, config.maxRetryDelay); +} +//# sourceMappingURL=retry.js.map /***/ }), -/***/ 70438: -/***/ ((__unused_webpack_module, exports) => { +/***/ 21980: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2023 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NoOpLogger = void 0; -class NoOpLogger { - trace() { } - debug() { } - info() { } - warn() { } - error() { } -} -exports.NoOpLogger = NoOpLogger; - +exports.pkg = void 0; +exports.pkg = __nccwpck_require__(6318); +//# sourceMappingURL=util.js.map /***/ }), -/***/ 61600: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 79910: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Client = void 0; -const middleware_stack_1 = __nccwpck_require__(97911); -class Client { - constructor(config) { - this.middlewareStack = (0, middleware_stack_1.constructStack)(); - this.config = config; +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(__nccwpck_require__(13685)); +const https = __importStar(__nccwpck_require__(95687)); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); } - send(command, optionsOrCb, cb) { - const options = typeof optionsOrCb !== "function" ? optionsOrCb : undefined; - const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; - const handler = command.resolveMiddleware(this.middlewareStack, this.config, options); - if (callback) { - handler(command) - .then((result) => callback(null, result.output), (err) => callback(err)) - .catch(() => { }); - } - else { - return handler(command).then((result) => result.output); - } + return Buffer.concat(chunks, length); +} +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); } - destroy() { - if (this.config.requestHandler.destroy) - this.config.requestHandler.destroy(); + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; } } -exports.Client = Client; - +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map /***/ }), -/***/ 32813: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7863: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.collectBody = void 0; -const util_stream_1 = __nccwpck_require__(96607); -const collectBody = async (streamBody = new Uint8Array(), context) => { - if (streamBody instanceof Uint8Array) { - return util_stream_1.Uint8ArrayBlobAdapter.mutate(streamBody); +exports.Agent = void 0; +const net = __importStar(__nccwpck_require__(41808)); +const http = __importStar(__nccwpck_require__(13685)); +const https_1 = __nccwpck_require__(95687); +__exportStar(__nccwpck_require__(79910), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; } - if (!streamBody) { - return util_stream_1.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. + incrementSockets(name) { + // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no + // need to create a fake socket because Node.js native connection pooling + // will never be invoked. + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; + } + // All instances of `sockets` are expected TypeScript errors. The + // alternative is to add it as a private property of this class but that + // will break TypeScript subclassing. 
+ if (!this.sockets[name]) { + // @ts-expect-error `sockets` is readonly in `@types/node` + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount++; + return fakeSocket; + } + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; + } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount--; + if (sockets.length === 0) { + // @ts-expect-error `sockets` is readonly in `@types/node` + delete this.sockets[name]; + } + } + } + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. + getName(options) { + const secureEndpoint = typeof options.secureEndpoint === 'boolean' + ? options.secureEndpoint + : this.isSecureEndpoint(options); + if (secureEndpoint) { + // @ts-expect-error `getName()` isn't defined in `@types/node` + return https_1.Agent.prototype.getName.call(this, options); + } + // @ts-expect-error `getName()` isn't defined in `@types/node` + return super.getName(options); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); } - const fromContext = context.streamCollector(streamBody); - return util_stream_1.Uint8ArrayBlobAdapter.mutate(await fromContext); -}; -exports.collectBody = collectBody; - + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } + } + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } + } +} +exports.Agent = Agent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 75414: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 74522: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Command = void 0; -const middleware_stack_1 = __nccwpck_require__(97911); -class Command { - constructor() { - this.middlewareStack = (0, middleware_stack_1.constructStack)(); +exports.HttpsProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(41808)); +const tls = __importStar(__nccwpck_require__(24404)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const agent_base_1 = __nccwpck_require__(7863); +const url_1 = __nccwpck_require__(57310); +const parse_proxy_response_1 = __nccwpck_require__(67224); +const debug = (0, debug_1.default)('https-proxy-agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. 
+ let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + const servername = this.connectOpts.servername || this.connectOpts.host; + socket = tls.connect({ + ...this.connectOpts, + servername, + }); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls.connect({ + ...omit(opts, 'host', 'path', 'port'), + socket, + servername, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; } } -exports.Command = Command; - +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map /***/ }), -/***/ 92541: -/***/ ((__unused_webpack_module, exports) => { +/***/ 67224: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SENSITIVE_STRING = void 0; -exports.SENSITIVE_STRING = "***SensitiveInformation***"; - +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map /***/ }), -/***/ 56929: -/***/ ((__unused_webpack_module, exports) => { +/***/ 51904: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createAggregatedClient = void 0; -const createAggregatedClient = (commands, Client) => { - for (const command of Object.keys(commands)) { - const CommandCtor = commands[command]; - const methodImpl = async function (args, optionsOrCb, cb) { - const command = new CommandCtor(args); - if (typeof optionsOrCb === "function") { - this.send(command, optionsOrCb); - } - else if (typeof cb === "function") { - if (typeof optionsOrCb !== "object") - throw new Error(`Expected http options but got ${typeof optionsOrCb}`); - this.send(command, optionsOrCb || {}, cb); - } - else { - return this.send(command, optionsOrCb); +exports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0; +const fs_1 = __nccwpck_require__(57147); +const os_1 = __nccwpck_require__(22037); +/** + * Known paths unique to Google Compute Engine Linux instances + */ +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', +}; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; +} +exports.isGoogleCloudServerless = isGoogleCloudServerless; +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. 
+ */ +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } +} +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; } - }; - const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); - Client.prototype[methodName] = methodImpl; + } } -}; -exports.createAggregatedClient = createAggregatedClient; - + return false; +} +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. + */ +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); +} +exports.isGoogleComputeEngine = isGoogleComputeEngine; +/** + * Determines if the process is running on Google Cloud Platform. + * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. + */ +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); +} +exports.detectGCPResidency = detectGCPResidency; +//# sourceMappingURL=gcp-residency.js.map /***/ }), -/***/ 21737: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 3563: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseEpochTimestamp = exports.parseRfc7231DateTime = exports.parseRfc3339DateTimeWithOffset = exports.parseRfc3339DateTime = exports.dateToUtcString = void 0; -const parse_utils_1 = __nccwpck_require__(74857); -const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; -const MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; -function dateToUtcString(date) { - const year = date.getUTCFullYear(); - const month = date.getUTCMonth(); - const dayOfWeek = date.getUTCDay(); - const dayOfMonthInt = date.getUTCDate(); - const hoursInt = date.getUTCHours(); - const minutesInt = date.getUTCMinutes(); - const secondsInt = date.getUTCSeconds(); - const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; - const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; - const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; - const secondsString = secondsInt < 10 ? 
`0${secondsInt}` : `${secondsInt}`; - return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; -} -exports.dateToUtcString = dateToUtcString; -const RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); -const parseRfc3339DateTime = (value) => { - if (value === null || value === undefined) { - return undefined; - } - if (typeof value !== "string") { - throw new TypeError("RFC-3339 date-times must be expressed as strings"); - } - const match = RFC3339.exec(value); - if (!match) { - throw new TypeError("Invalid RFC-3339 date-time value"); - } - const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; - const year = (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)); - const month = parseDateValue(monthStr, "month", 1, 12); - const day = parseDateValue(dayStr, "day", 1, 31); - return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); -}; -exports.parseRfc3339DateTime = parseRfc3339DateTime; -const RFC3339_WITH_OFFSET = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/); -const parseRfc3339DateTimeWithOffset = (value) => { - if (value === null || value === undefined) { - return undefined; - } - if (typeof value !== "string") { - throw new TypeError("RFC-3339 date-times must be expressed as strings"); - } - const match = RFC3339_WITH_OFFSET.exec(value); - if (!match) { - throw new TypeError("Invalid RFC-3339 date-time value"); - } - const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; - const year = (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)); - const month = parseDateValue(monthStr, "month", 1, 12); - const day = parseDateValue(dayStr, "day", 1, 31); - const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); - if (offsetStr.toUpperCase() != "Z") { - date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); - } - return date; -}; -exports.parseRfc3339DateTimeWithOffset = parseRfc3339DateTimeWithOffset; -const IMF_FIXDATE = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); -const RFC_850_DATE = new RegExp(/^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); -const ASC_TIME = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? 
(\d{4})$/); -const parseRfc7231DateTime = (value) => { - if (value === null || value === undefined) { - return undefined; - } - if (typeof value !== "string") { - throw new TypeError("RFC-7231 date-times must be expressed as strings"); - } - let match = IMF_FIXDATE.exec(value); - if (match) { - const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; - return buildDate((0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); - } - match = RFC_850_DATE.exec(value); - if (match) { - const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; - return adjustRfc850Year(buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { - hours, - minutes, - seconds, - fractionalMilliseconds, - })); - } - match = ASC_TIME.exec(value); - if (match) { - const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; - return buildDate((0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr.trimLeft(), "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; } - throw new TypeError("Invalid RFC-7231 date-time value"); + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; -exports.parseRfc7231DateTime = parseRfc7231DateTime; -const parseEpochTimestamp = (value) => { - if (value === null || value === undefined) { - return undefined; - } - let valueAsDouble; - if (typeof value === "number") { - valueAsDouble = value; - } - else if (typeof value === "string") { - valueAsDouble = (0, parse_utils_1.strictParseDouble)(value); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.requestTimeout = exports.setGCPResidency = exports.getGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.bulk = exports.universe = exports.project = exports.instance = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const jsonBigint = __nccwpck_require__(55031); +const gcp_residency_1 = __nccwpck_require__(51904); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +/** + * Metadata server detection override 
options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. + */ +exports.METADATA_SERVER_DETECTION = Object.freeze({ + 'assume-present': "don't try to ping the metadata server, but assume it's present", + none: "don't try to ping the metadata server, but don't try to use it either", + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", + 'ping-only': 'skip the BIOS probe, and go straight to pinging', +}); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. + */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; + } + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; + } + return new URL(exports.BASE_PATH, baseUrl).href; +} +// Accepts an options object passed from the user to the API. In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); +} +async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { + let metadataKey = ''; + let params = {}; + let headers = {}; + if (typeof type === 'object') { + const metadataAccessor = type; + metadataKey = metadataAccessor.metadataKey; + params = metadataAccessor.params || params; + headers = metadataAccessor.headers || headers; + noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; + fastFail = metadataAccessor.fastFail || fastFail; } else { - throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); - } - if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { - throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); - } - return new Date(Math.round(valueAsDouble * 1000)); -}; -exports.parseEpochTimestamp = parseEpochTimestamp; -const buildDate = (year, month, day, time) => { - const adjustedMonth = month - 1; - validateDayOfMonth(year, adjustedMonth, day); - return new Date(Date.UTC(year, adjustedMonth, day, parseDateValue(time.hours, "hour", 0, 23), parseDateValue(time.minutes, "minute", 0, 59), parseDateValue(time.seconds, "seconds", 0, 60), parseMilliseconds(time.fractionalMilliseconds))); -}; -const parseTwoDigitYear = (value) => { - const thisYear = new Date().getUTCFullYear(); - const valueInThisCentury = Math.floor(thisYear / 100) * 100 + (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(value)); - if (valueInThisCentury < thisYear) { - return valueInThisCentury + 100; - } - return valueInThisCentury; -}; -const FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1000; -const adjustRfc850Year = (input) => { - if (input.getTime() - new Date().getTime() > FIFTY_YEARS_IN_MILLIS) { - return new Date(Date.UTC(input.getUTCFullYear() - 100, 
input.getUTCMonth(), input.getUTCDate(), input.getUTCHours(), input.getUTCMinutes(), input.getUTCSeconds(), input.getUTCMilliseconds())); - } - return input; -}; -const parseMonthByShortName = (value) => { - const monthIdx = MONTHS.indexOf(value); - if (monthIdx < 0) { - throw new TypeError(`Invalid month: ${value}`); + metadataKey = type; } - return monthIdx + 1; -}; -const DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; -const validateDayOfMonth = (year, month, day) => { - let maxDays = DAYS_IN_MONTH[month]; - if (month === 1 && isLeapYear(year)) { - maxDays = 29; - } - if (day > maxDays) { - throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + if (typeof options === 'string') { + metadataKey += `/${options}`; } -}; -const isLeapYear = (year) => { - return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); -}; -const parseDateValue = (value, type, lower, upper) => { - const dateVal = (0, parse_utils_1.strictParseByte)(stripLeadingZeroes(value)); - if (dateVal < lower || dateVal > upper) { - throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + else { + validate(options); + if (options.property) { + metadataKey += `/${options.property}`; + } + headers = options.headers || headers; + params = options.params || params; } - return dateVal; -}; -const parseMilliseconds = (value) => { - if (value === null || value === undefined) { - return 0; + try { + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const res = await requestMethod({ + url: `${getBaseUrl()}/${metadataKey}`, + headers: { ...exports.HEADERS, ...headers }, + retryConfig: { noResponseRetries }, + params, + responseType: 'text', + timeout: requestTimeout(), + }); + // NOTE: node.js converts all incoming headers to lower case. + if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`); + } + else if (!res.data) { + throw new Error('Invalid response from the metadata service'); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; } - return (0, parse_utils_1.strictParseFloat32)("0." + value) * 1000; -}; -const parseOffsetToMilliseconds = (value) => { - const directionStr = value[0]; - let direction = 1; - if (directionStr == "+") { - direction = 1; + catch (e) { + const err = e; + if (err.response && err.response.status !== 200) { + err.message = `Unsuccessful response status code. ${err.message}`; + } + throw e; } - else if (directionStr == "-") { - direction = -1; +} +async function fastFailMetadataRequest(options) { + const secondaryOptions = { + ...options, + url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. + // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. 
+ // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; + } + else { + responded = true; + throw err; + } + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; + } + else { + responded = true; + throw err; + } + }); + return Promise.race([r1, r2]); +} +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); +} +exports.instance = instance; +/** + * Obtain metadata for the current GCP project. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); +} +exports.project = project; +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe_domain'); + * ``` + */ +function universe(options) { + return metadataAccessor('universe', options); +} +exports.universe = universe; +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +async function bulk(properties) { + const r = {}; + await Promise.all(properties.map(item => { + return (async () => { + const res = await metadataAccessor(item); + const key = item.metadataKey; + r[key] = res; + })(); + })); + return r; +} +exports.bulk = bulk; +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? Number(process.env.DETECT_GCP_RETRIES) + : 0; +} +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + if (process.env.METADATA_SERVER_DETECTION) { + const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); + if (!(value in exports.METADATA_SERVER_DETECTION)) { + throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. 
Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); + } + switch (value) { + case 'assume-present': + return true; + case 'none': + return false; + case 'bios-only': + return getGCPResidency(); + case 'ping-only': + // continue, we want to ping the server + } } - else { - throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. + if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + } + await cachedIsAvailableResponse; + return true; } - const hour = Number(value.substring(1, 3)); - const minute = Number(value.substring(4, 6)); - return direction * (hour * 60 + minute) * 60 * 1000; -}; -const stripLeadingZeroes = (value) => { - let idx = 0; - while (idx < value.length - 1 && value.charAt(idx) === "0") { - idx++; + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); + } + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; + } + if (err.response && err.response.status === 404) { + return false; + } + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); + } + // Failure to resolve the metadata service means that it is not available. + return false; + } } - if (idx === 0) { - return value; +} +exports.isAvailable = isAvailable; +/** + * reset the memoized isAvailable() lookup. + */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; +} +exports.resetIsAvailableCache = resetIsAvailableCache; +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. + * + * @see setGCPResidency for setting + */ +function getGCPResidency() { + if (exports.gcpResidencyCache === null) { + setGCPResidency(); } - return value.slice(idx); -}; - + return exports.gcpResidencyCache; +} +exports.getGCPResidency = getGCPResidency; +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting + */ +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); +} +exports.setGCPResidency = setGCPResidency; +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. 
This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +function requestTimeout() { + return getGCPResidency() ? 0 : 3000; +} +exports.requestTimeout = requestTimeout; +__exportStar(__nccwpck_require__(51904), exports); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 9681: +/***/ 44627: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.withBaseException = exports.throwDefaultError = void 0; -const exceptions_1 = __nccwpck_require__(88074); -const throwDefaultError = ({ output, parsedBody, exceptionCtor, errorCode }) => { - const $metadata = deserializeMetadata(output); - const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : undefined; - const response = new exceptionCtor({ - name: (parsedBody === null || parsedBody === void 0 ? void 0 : parsedBody.code) || (parsedBody === null || parsedBody === void 0 ? void 0 : parsedBody.Code) || errorCode || statusCode || "UnknownError", - $fault: "client", - $metadata, - }); - throw (0, exceptions_1.decorateServiceException)(response, parsedBody); -}; -exports.throwDefaultError = throwDefaultError; -const withBaseException = (ExceptionCtor) => { - return ({ output, parsedBody, errorCode }) => { - (0, exports.throwDefaultError)({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); - }; -}; -exports.withBaseException = withBaseException; -const deserializeMetadata = (output) => { - var _a, _b; - return ({ - httpStatusCode: output.statusCode, - requestId: (_b = (_a = output.headers["x-amzn-requestid"]) !== null && _a !== void 0 ? _a : output.headers["x-amzn-request-id"]) !== null && _b !== void 0 ? 
_b : output.headers["x-amz-request-id"], - extendedRequestId: output.headers["x-amz-id-2"], - cfId: output.headers["x-amz-cf-id"], - }); -}; - - -/***/ }), - -/***/ 11163: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.loadConfigsForDefaultMode = void 0; -const loadConfigsForDefaultMode = (mode) => { - switch (mode) { - case "standard": - return { - retryMode: "standard", - connectionTimeout: 3100, - }; - case "in-region": - return { - retryMode: "standard", - connectionTimeout: 1100, - }; - case "cross-region": - return { - retryMode: "standard", - connectionTimeout: 3100, - }; - case "mobile": - return { - retryMode: "standard", - connectionTimeout: 30000, - }; - default: - return {}; +exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; +const events_1 = __nccwpck_require__(82361); +const gaxios_1 = __nccwpck_require__(59555); +const transporters_1 = __nccwpck_require__(72649); +const util_1 = __nccwpck_require__(68905); +/** + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} + */ +exports.DEFAULT_UNIVERSE = 'googleapis.com'; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; +class AuthClient extends events_1.EventEmitter { + constructor(opts = {}) { + var _a, _b, _c, _d, _e; + super(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; + this.forceRefreshOnFailure = false; + this.universeDomain = exports.DEFAULT_UNIVERSE; + const options = (0, util_1.originalOrCamelOptions)(opts); + // Shared auth options + this.apiKey = opts.apiKey; + this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null; + this.quotaProjectId = options.get('quota_project_id'); + this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; + this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; + // Shared client options + this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); + if (opts.transporterOptions) { + this.transporter.defaults = opts.transporterOptions; + } + if (opts.eagerRefreshThresholdMillis) { + this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; } -}; -exports.loadConfigsForDefaultMode = loadConfigsForDefaultMode; + /** + * Return the {@link Gaxios `Gaxios`} instance from the {@link AuthClient.transporter}. + * + * @expiremental + */ + get gaxios() { + if (this.transporter instanceof gaxios_1.Gaxios) { + return this.transporter; + } + else if (this.transporter instanceof transporters_1.DefaultTransporter) { + return this.transporter.instance; + } + else if ('instance' in this.transporter && + this.transporter.instance instanceof gaxios_1.Gaxios) { + return this.transporter.instance; + } + return null; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. 
This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. + */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. + */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.AuthClient = AuthClient; /***/ }), -/***/ 91809: -/***/ ((__unused_webpack_module, exports) => { +/***/ 71569: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _a, _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.emitWarningIfUnsupportedVersion = void 0; -let warningEmitted = false; -const emitWarningIfUnsupportedVersion = (version) => { - if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 14) { - warningEmitted = true; +exports.AwsClient = void 0; +const awsrequestsigner_1 = __nccwpck_require__(1754); +const baseexternalclient_1 = __nccwpck_require__(40810); +const defaultawssecuritycredentialssupplier_1 = __nccwpck_require__(89799); +const util_1 = __nccwpck_require__(68905); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. 
+ * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const awsSecurityCredentialsSupplier = opts.get('aws_security_credentials_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !awsSecurityCredentialsSupplier) { + throw new Error('A credential source or AWS security credentials supplier must be specified.'); + } + if (credentialSource && awsSecurityCredentialsSupplier) { + throw new Error('Only one of credential source or AWS security credentials supplier can be specified.'); + } + if (awsSecurityCredentialsSupplier) { + this.awsSecurityCredentialsSupplier = awsSecurityCredentialsSupplier; + this.regionalCredVerificationUrl = + __classPrivateFieldGet(_a, _a, "f", _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL); + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + this.environmentId = credentialSourceOpts.get('environment_id'); + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + const regionUrl = credentialSourceOpts.get('region_url'); + // This is only required if AWS security credentials are not available in + // environment variables. + const securityCredentialsUrl = credentialSourceOpts.get('url'); + const imdsV2SessionTokenUrl = credentialSourceOpts.get('imdsv2_session_token_url'); + this.awsSecurityCredentialsSupplier = + new defaultawssecuritycredentialssupplier_1.DefaultAwsSecurityCredentialsSupplier({ + regionUrl: regionUrl, + securityCredentialsUrl: securityCredentialsUrl, + imdsV2SessionTokenUrl: imdsV2SessionTokenUrl, + }); + this.regionalCredVerificationUrl = credentialSourceOpts.get('regional_cred_verification_url'); + this.credentialSourceType = 'aws'; + // Data validators. + this.validateEnvironmentId(); + } + this.awsRequestSigner = null; + this.region = ''; + } + validateEnvironmentId() { + var _b; + const match = (_b = this.environmentId) === null || _b === void 0 ? void 0 : _b.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + } + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. This will call the + * {@link AwsSecurityCredentialsSupplier} to retrieve an AWS region and AWS + * Security Credentials, then use them to create a signed AWS STS request that + * can be exchanged for a GCP access token. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. 
+ if (!this.awsRequestSigner) { + this.region = await this.awsSecurityCredentialsSupplier.getAwsRegion(this.supplierContext); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + return this.awsSecurityCredentialsSupplier.getAwsSecurityCredentials(this.supplierContext); + }, this.region); + } + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. + const options = await this.awsRequestSigner.getRequestOptions({ + ..._a.RETRY_CONFIG, + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); + } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); } -}; -exports.emitWarningIfUnsupportedVersion = emitWarningIfUnsupportedVersion; +} +exports.AwsClient = AwsClient; +_a = AwsClient; +_AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = { value: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15' }; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; /***/ }), -/***/ 88074: -/***/ ((__unused_webpack_module, exports) => { +/***/ 1754: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.decorateServiceException = exports.ServiceException = void 0; -class ServiceException extends Error { - constructor(options) { - super(options.message); - Object.setPrototypeOf(this, ServiceException.prototype); - this.name = options.name; - this.$fault = options.$fault; - this.$metadata = options.$metadata; - } -} -exports.ServiceException = ServiceException; -const decorateServiceException = (exception, additions = {}) => { - Object.entries(additions) - .filter(([, v]) => v !== undefined) - .forEach(([k, v]) => { - if (exception[k] == undefined || exception[k] === "") { - exception[k] = v; +exports.AwsRequestSigner = void 0; +const crypto_1 = __nccwpck_require__(78043); +/** AWS Signature Version 4 signing algorithm identifier. */ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; +/** + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + */ +const AWS_REQUEST_TYPE = 'aws4_request'; +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. + */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); + } + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? 
{ 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); + } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; } - }); - const message = exception.message || exception.Message || "UnknownError"; - exception.message = message; - delete exception.Message; - return exception; -}; -exports.decorateServiceException = decorateServiceException; + return awsSignedReq; + } +} +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); +} +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. + */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; +} +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. + * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. + const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. 
+ // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? {} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; +} /***/ }), -/***/ 76016: -/***/ ((__unused_webpack_module, exports) => { +/***/ 40810: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _BaseExternalAccountClient_instances, _BaseExternalAccountClient_pendingAccessToken, _BaseExternalAccountClient_internalRefreshAccessTokenAsync; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.extendedEncodeURIComponent = void 0; -function extendedEncodeURIComponent(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); +exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = __nccwpck_require__(12781); +const authclient_1 = __nccwpck_require__(44627); +const sts = __nccwpck_require__(86308); +const util_1 = __nccwpck_require__(68905); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** + * Cloud resource manager URL used to retrieve project information. + * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/token'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(51402); +/** + * For backwards compatibility. + */ +var authclient_2 = __nccwpck_require__(44627); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. 
The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + _BaseExternalAccountClient_instances.add(this); + /** + * A pending access token request. Used for concurrent calls. + */ + _BaseExternalAccountClient_pendingAccessToken.set(this, null); + const opts = (0, util_1.originalOrCamelOptions)(options); + const type = opts.get('type'); + if (type && type !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); + } + const clientId = opts.get('client_id'); + const clientSecret = opts.get('client_secret'); + const tokenUrl = (_a = opts.get('token_url')) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain); + const subjectTokenType = opts.get('subject_token_type'); + const workforcePoolUserProject = opts.get('workforce_pool_user_project'); + const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); + const serviceAccountImpersonation = opts.get('service_account_impersonation'); + const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); + this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || + `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); + if (clientId) { + this.clientAuth = { + confidentialClientType: 'basic', + clientId, + clientSecret, + }; + } + this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); + this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = opts.get('audience'); + this.subjectTokenType = subjectTokenType; + this.workforcePoolUserProject = workforcePoolUserProject; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; + this.serviceAccountImpersonationLifetime = + serviceAccountImpersonationLifetime; + if (this.serviceAccountImpersonationLifetime) { + this.configLifetimeRequested = true; + } + else { + this.configLifetimeRequested = false; + this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; + } + this.projectNumber = this.getProjectNumber(this.audience); + this.supplierContext = { + audience: this.audience, + subjectTokenType: this.subjectTokenType, + transporter: this.transporter, + }; + } + /** The service account email to be impersonated, if 
available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + if (this.serviceAccountImpersonationUrl.length > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} + **/ + throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + } + // Parse email from URL. The formal looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; + } + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; + } + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. 
+ const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + ...BaseExternalAccountClient.RETRY_CONFIG, + headers, + url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Use an existing access token request, or cache a new one + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f") || __classPrivateFieldGet(this, _BaseExternalAccountClient_instances, "m", _BaseExternalAccountClient_internalRefreshAccessTokenAsync).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f"); + } + finally { + // clear pending access token for future requests + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, null, "f"); + } + } + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. 
+ */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; + } + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + ...BaseExternalAccountClient.RETRY_CONFIG, + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. + if (typeof this.scopes === 'string') { + return [this.scopes]; + } + return this.scopes || [DEFAULT_OAUTH_SCOPE]; + } + getMetricsHeaderValue() { + const nodeVersion = process.version.replace(/^v/, ''); + const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; + const credentialSourceType = this.credentialSourceType + ? this.credentialSourceType + : 'unknown'; + return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; + } +} +exports.BaseExternalAccountClient = BaseExternalAccountClient; +_BaseExternalAccountClient_pendingAccessToken = new WeakMap(), _BaseExternalAccountClient_instances = new WeakSet(), _BaseExternalAccountClient_internalRefreshAccessTokenAsync = async function _BaseExternalAccountClient_internalRefreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? 
[DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? { userProject: this.workforcePoolUserProject } + : undefined; + const additionalHeaders = { + 'x-goog-api-client': this.getMetricsHeaderValue(), + }; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, }); -} -exports.extendedEncodeURIComponent = extendedEncodeURIComponent; + // Return the cached access token. + return this.cachedAccessToken; +}; /***/ }), -/***/ 30941: +/***/ 96875: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveChecksumRuntimeConfig = exports.getChecksumConfiguration = exports.AlgorithmId = void 0; -const types_1 = __nccwpck_require__(55756); -Object.defineProperty(exports, "AlgorithmId", ({ enumerable: true, get: function () { return types_1.AlgorithmId; } })); -const getChecksumConfiguration = (runtimeConfig) => { - const checksumAlgorithms = []; - for (const id in types_1.AlgorithmId) { - const algorithmId = types_1.AlgorithmId[id]; - if (runtimeConfig[algorithmId] === undefined) { - continue; +exports.Compute = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const gcpMetadata = __nccwpck_require__(3563); +const oauth2client_1 = __nccwpck_require__(3936); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. 
+ * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = Array.isArray(options.scopes) + ? options.scopes + : options.scopes + ? [options.scopes] + : []; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); } - checksumAlgorithms.push({ - algorithmId: () => algorithmId, - checksumConstructor: () => runtimeConfig[algorithmId], - }); + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; + } + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res: null }; } - return { - _checksumAlgorithms: checksumAlgorithms, - addChecksumAlgorithm(algo) { - this._checksumAlgorithms.push(algo); - }, - checksumAlgorithms() { - return this._checksumAlgorithms; - }, - }; -}; -exports.getChecksumConfiguration = getChecksumConfiguration; -const resolveChecksumRuntimeConfig = (clientConfig) => { - const runtimeConfig = {}; - clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { - runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); - }); - return runtimeConfig; -}; -exports.resolveChecksumRuntimeConfig = resolveChecksumRuntimeConfig; + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.status = res.status; + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. 
This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; /***/ }), -/***/ 78643: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 89799: +/***/ (function(__unused_webpack_module, exports) { "use strict"; +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _DefaultAwsSecurityCredentialsSupplier_instances, _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveDefaultRuntimeConfig = exports.getDefaultClientConfiguration = exports.getDefaultExtensionConfiguration = void 0; -const checksum_1 = __nccwpck_require__(30941); -const retry_1 = __nccwpck_require__(67367); -const getDefaultExtensionConfiguration = (runtimeConfig) => { - return { - ...(0, checksum_1.getChecksumConfiguration)(runtimeConfig), - ...(0, retry_1.getRetryConfiguration)(runtimeConfig), +exports.DefaultAwsSecurityCredentialsSupplier = void 0; +/** + * Internal AWS security credentials supplier implementation used by {@link AwsClient} + * when a credential source is provided instead of a user defined supplier. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. 
+ */ +class DefaultAwsSecurityCredentialsSupplier { + /** + * Instantiates a new DefaultAwsSecurityCredentialsSupplier using information + * from the credential_source stored in the ADC file. + * @param opts The default aws security credentials supplier options object to + * build the supplier with. + */ + constructor(opts) { + _DefaultAwsSecurityCredentialsSupplier_instances.add(this); + this.regionUrl = opts.regionUrl; + this.securityCredentialsUrl = opts.securityCredentialsUrl; + this.imdsV2SessionTokenUrl = opts.imdsV2SessionTokenUrl; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Returns the active AWS region. This first checks to see if the region + * is available as an environment variable. If it is not, then the supplier + * will call the region URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS region string. + */ + async getAwsRegion(context) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get); + } + const metadataHeaders = {}; + if (!__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get) && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: metadataHeaders, + }; + const response = await context.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * Returns AWS security credentials. This first checks to see if the credentials + * is available as environment variables. If it is not, then the supplier + * will call the security credentials URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS security credentials. + */ + async getAwsSecurityCredentials(context) { + // Check environment variables for permanent credentials first. 
+ // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get); + } + const metadataHeaders = {}; + if (this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + // Since the role on a VM can change, we don't need to cache it. + const roleName = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName).call(this, metadataHeaders, context.transporter); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. + const awsCreds = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials).call(this, roleName, metadataHeaders, context.transporter); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + } +} +exports.DefaultAwsSecurityCredentialsSupplier = DefaultAwsSecurityCredentialsSupplier; +_DefaultAwsSecurityCredentialsSupplier_instances = new WeakSet(), _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken = +/** + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the IMDSv2 Session Token. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken(transporter) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, }; -}; -exports.getDefaultExtensionConfiguration = getDefaultExtensionConfiguration; -exports.getDefaultClientConfiguration = exports.getDefaultExtensionConfiguration; -const resolveDefaultRuntimeConfig = (config) => { - return { - ...(0, checksum_1.resolveChecksumRuntimeConfig)(config), - ...(0, retry_1.resolveRetryRuntimeConfig)(config), + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName = +/** + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. 
+ */ +async function _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName(headers, transporter) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, }; + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials = +/** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ +async function _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials(roleName, headers, transporter) { + const response = await transporter.request({ + ...this.additionalGaxiosOptions, + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. + return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); +}, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; }; -exports.resolveDefaultRuntimeConfig = resolveDefaultRuntimeConfig; /***/ }), -/***/ 1822: +/***/ 6270: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
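
The supplier above short-circuits to environment variables before touching any metadata endpoint: `AWS_REGION`/`AWS_DEFAULT_REGION` for the region, and `AWS_ACCESS_KEY_ID`/`AWS_SECRET_ACCESS_KEY` (plus optional `AWS_SESSION_TOKEN`) for credentials. A minimal sketch of that path, assuming placeholder credential values and a hypothetical `./aws-external-account.json` configuration file:

```js
// Minimal sketch; all values are placeholders.
process.env.AWS_REGION = 'us-east-2';              // takes precedence over AWS_DEFAULT_REGION
process.env.AWS_ACCESS_KEY_ID = 'AKIAEXAMPLE';     // placeholder
process.env.AWS_SECRET_ACCESS_KEY = 'secret';      // placeholder
process.env.AWS_SESSION_TOKEN = 'token';           // optional, for temporary credentials

const { AwsClient } = require('google-auth-library');

// With the variables above set, the default supplier resolves the region and
// credentials from the environment and skips the region_url / url metadata
// endpoints from credential_source entirely.
const client = new AwsClient(require('./aws-external-account.json')); // hypothetical file
client.getAccessToken().then(t => console.log(Boolean(t.token)));
```
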
Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(78643), exports); +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = __nccwpck_require__(12781); +const authclient_1 = __nccwpck_require__(44627); +const sts = __nccwpck_require__(86308); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The requested token exchange subject_token_type: rfc8693#section-2.1 + */ +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. + */ +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +class DownscopedClient extends authclient_1.AuthClient { + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. + * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. + */ + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super({ ...additionalOptions, quotaProjectId }); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. 
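
A minimal sketch of the constructor contract described above: a source credential plus a Credential Access Boundary with one to ten rules, each carrying at least one permission. The rule field names other than `accessBoundaryRules`/`availablePermissions` are assumptions drawn from the Credential Access Boundary documentation rather than from this bundle, and the bucket and role are placeholders:

```js
// Minimal sketch; bucket name and role are placeholders, and rule field names
// beyond accessBoundaryRules/availablePermissions are assumed from the
// Credential Access Boundary documentation.
const { GoogleAuth, DownscopedClient } = require('google-auth-library');

async function getDownscopedToken() {
  const auth = new GoogleAuth({
    scopes: 'https://www.googleapis.com/auth/cloud-platform',
  });
  const sourceClient = await auth.getClient();
  const cab = {
    accessBoundary: {
      accessBoundaryRules: [
        {
          availableResource:
            '//storage.googleapis.com/projects/_/buckets/my-bucket',
          availablePermissions: ['inRole:roles/storage.objectViewer'],
        },
      ],
    },
  };
  // 1-10 rules are allowed; each rule needs at least one permission.
  const downscoped = new DownscopedClient(sourceClient, cab);
  return downscoped.getAccessToken(); // { token, expirationTime, res }
}
```
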
+ if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); + } + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); + } + // Check at least one permission should be defined in each Access Boundary + // Rule. + for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } + } + this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); + this.cachedDownscopedAccessToken = null; + } + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; + } + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure + * @return A promise that resolves with the successful response. 
+ */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. + */ + async refreshAccessTokenAsync() { + var _a; + // Retrieve GCP access token from source credential. + const subjectToken = (await this.authClient.getAccessToken()).token; + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken: subjectToken, + subjectTokenType: STS_SUBJECT_TOKEN_TYPE, + }; + // Exchange the source AuthClient access token for a Downscoped access + // token. + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); + /** + * The STS endpoint will only return the expiration time for the downscoped + * access token if the original access token represents a service account. + * The downscoped token's expiration time will always match the source + * credential expiration. When no expires_in is returned, we can copy the + * source credential's expiration time. + */ + const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; + const expiryDate = stsResponse.expires_in + ? new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedDownscopedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. 
+ * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.DownscopedClient = DownscopedClient; /***/ }), -/***/ 67367: -/***/ ((__unused_webpack_module, exports) => { +/***/ 21380: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveRetryRuntimeConfig = exports.getRetryConfiguration = void 0; -const getRetryConfiguration = (runtimeConfig) => { - let _retryStrategy = runtimeConfig.retryStrategy; - return { - setRetryStrategy(retryStrategy) { - _retryStrategy = retryStrategy; - }, - retryStrategy() { - return _retryStrategy; - }, - }; -}; -exports.getRetryConfiguration = getRetryConfiguration; -const resolveRetryRuntimeConfig = (retryStrategyConfiguration) => { - const runtimeConfig = {}; - runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); - return runtimeConfig; -}; -exports.resolveRetryRuntimeConfig = resolveRetryRuntimeConfig; +exports.GCPEnv = void 0; +exports.clear = clear; +exports.getEnv = getEnv; +const gcpMetadata = __nccwpck_require__(3563); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; +} +async function getEnv() { + if (envPromise) { + return envPromise; + } + envPromise = getEnvMemoized(); + return envPromise; +} +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; + } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; + } + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; + } + else { + env = GCPEnv.COMPUTE_ENGINE; + } + } + else { + env = GCPEnv.NONE; + } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); +} +/** + * This check only verifies that the environment is running knative. + * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. 
+ */ +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; + } + catch (e) { + return false; + } +} +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} /***/ }), -/***/ 42638: +/***/ 8749: /***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getArrayIfSingleItem = void 0; -const getArrayIfSingleItem = (mayBeArray) => Array.isArray(mayBeArray) ? mayBeArray : [mayBeArray]; -exports.getArrayIfSingleItem = getArrayIfSingleItem; +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +class ExecutableResponse { + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson) { + // Check that the required fields exist in the json response. + if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. + if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. 
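
The validation in this constructor fixes the shape an executable's output must take. A sketch of the two accepted forms, with placeholder values only:

```js
// Minimal sketch of executable output; all values are placeholders.
// Successful OIDC-style response: 'version', 'success' and 'token_type' are
// required; OIDC responses carry 'id_token', SAML responses carry
// 'saml_response' instead.
const successJson = {
  version: 1,
  success: true,
  token_type: 'urn:ietf:params:oauth:token-type:id_token',
  id_token: 'placeholder.jwt.value',
  expiration_time: Math.round(Date.now() / 1000) + 3600,
};

// Unsuccessful response: both 'code' and 'message' are required.
const errorJson = {
  version: 1,
  success: false,
  code: '401',
  message: 'Caller not authorized.',
};
```
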
+ if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; + } + else { + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); + } + this.subjectToken = responseJson.id_token; + } + } + else { + // Both code and message must be provided for unsuccessful responses. + if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + } + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + } + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; + } + } + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid() { + return !this.isExpired() && this.success; + } + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); + } +} +exports.ExecutableResponse = ExecutableResponse; +/** + * An error thrown by the ExecutableResponse class. + */ +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableResponseError = ExecutableResponseError; +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +class InvalidVersionFieldError extends ExecutableResponseError { +} +exports.InvalidVersionFieldError = InvalidVersionFieldError; +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +class InvalidSuccessFieldError extends ExecutableResponseError { +} +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. + */ +class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +class InvalidCodeFieldError extends ExecutableResponseError { +} +exports.InvalidCodeFieldError = InvalidCodeFieldError; +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +class InvalidMessageFieldError extends ExecutableResponseError { +} +exports.InvalidMessageFieldError = InvalidMessageFieldError; +/** + * An error thrown when the subject token in an executable response is missing or invalid. 
+ */ +class InvalidSubjectTokenError extends ExecutableResponseError { +} +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; /***/ }), -/***/ 92188: -/***/ ((__unused_webpack_module, exports) => { +/***/ 38765: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getValueFromTextNode = void 0; -const getValueFromTextNode = (obj) => { - const textNodeName = "#text"; - for (const key in obj) { - if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== undefined) { - obj[key] = obj[key][textNodeName]; +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = __nccwpck_require__(44627); +const oauth2common_1 = __nccwpck_require__(19510); +const gaxios_1 = __nccwpck_require__(59555); +const stream = __nccwpck_require__(12781); +const baseexternalclient_1 = __nccwpck_require__(40810); +/** + * The credentials JSON file type for external account authorized user clients. + */ +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/oauthtoken'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. + * @param transporter The transporter to use for the refresh request. + * @param clientAuthentication The client authentication credentials to use + * for the refresh request. + */ + constructor(url, transporter, clientAuthentication) { + super(clientAuthentication); + this.url = url; + this.transporter = transporter; + } + /** + * Requests a new access token from the token_url endpoint using the provided + * refresh token. + * @param refreshToken The refresh token to use to generate a new access token. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @return A promise that resolves with the token refresh response containing + * the requested access token and its expiration time. + */ + async refreshToken(refreshToken, additionalHeaders) { + const values = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + ...additionalHeaders, + }; + const opts = { + ...ExternalAccountAuthorizedUserHandler.RETRY_CONFIG, + url: this.url, + method: 'POST', + headers, + data: values.toString(), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. 
+ const tokenRefreshResponse = response.data; + tokenRefreshResponse.res = response; + return tokenRefreshResponse; } - else if (typeof obj[key] === "object" && obj[key] !== null) { - obj[key] = (0, exports.getValueFromTextNode)(obj[key]); + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; } } - return obj; -}; -exports.getValueFromTextNode = getValueFromTextNode; +} +/** + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. + */ +class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { + /** + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + if (options.universe_domain) { + this.universeDomain = options.universe_domain; + } + this.refreshToken = options.refresh_token; + const clientAuth = { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + }; + this.externalAccountAuthorizedUserHandler = + new ExternalAccountAuthorizedUserHandler((_a = options.token_url) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain), this.transporter, clientAuth); + this.cachedAccessToken = null; + this.quotaProjectId = options.quota_project_id; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; + } + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); + } + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. 
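
A minimal sketch of constructing this authorized-user client from its credential JSON, using only the fields read in the constructor above; the top-level `google-auth-library` export of the class and every value shown are assumptions/placeholders:

```js
// Minimal sketch; every value is a placeholder, and the top-level export of
// ExternalAccountAuthorizedUserClient from 'google-auth-library' is assumed.
const { ExternalAccountAuthorizedUserClient } = require('google-auth-library');

const client = new ExternalAccountAuthorizedUserClient({
  type: 'external_account_authorized_user',
  client_id: 'placeholder-client-id',
  client_secret: 'placeholder-client-secret',
  refresh_token: 'placeholder-refresh-token',
  token_url: 'https://sts.googleapis.com/v1/oauthtoken',
  quota_project_id: 'placeholder-project',
});

client.getRequestHeaders().then(headers => console.log(Object.keys(headers)));
```
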
+ return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. + */ + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; + } + return this.cachedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? 
now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; /***/ }), -/***/ 63570: +/***/ 94381: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(70438), exports); -tslib_1.__exportStar(__nccwpck_require__(61600), exports); -tslib_1.__exportStar(__nccwpck_require__(32813), exports); -tslib_1.__exportStar(__nccwpck_require__(75414), exports); -tslib_1.__exportStar(__nccwpck_require__(92541), exports); -tslib_1.__exportStar(__nccwpck_require__(56929), exports); -tslib_1.__exportStar(__nccwpck_require__(21737), exports); -tslib_1.__exportStar(__nccwpck_require__(9681), exports); -tslib_1.__exportStar(__nccwpck_require__(11163), exports); -tslib_1.__exportStar(__nccwpck_require__(91809), exports); -tslib_1.__exportStar(__nccwpck_require__(1822), exports); -tslib_1.__exportStar(__nccwpck_require__(88074), exports); -tslib_1.__exportStar(__nccwpck_require__(76016), exports); -tslib_1.__exportStar(__nccwpck_require__(42638), exports); -tslib_1.__exportStar(__nccwpck_require__(92188), exports); -tslib_1.__exportStar(__nccwpck_require__(32964), exports); -tslib_1.__exportStar(__nccwpck_require__(83495), exports); -tslib_1.__exportStar(__nccwpck_require__(74857), exports); -tslib_1.__exportStar(__nccwpck_require__(15342), exports); -tslib_1.__exportStar(__nccwpck_require__(53456), exports); -tslib_1.__exportStar(__nccwpck_require__(1752), exports); -tslib_1.__exportStar(__nccwpck_require__(92480), exports); +exports.ExternalAccountClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(40810); +const identitypoolclient_1 = __nccwpck_require__(20117); +const awsclient_1 = __nccwpck_require__(71569); +const pluggable_auth_client_1 = __nccwpck_require__(44782); +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. + */ +class ExternalAccountClient { + constructor() { + throw new Error('ExternalAccountClients should be initialized via: ' + + 'ExternalAccountClient.fromJSON(), ' + + 'directly via explicit constructors, eg. ' + + 'new AwsClient(options), new IdentityPoolClient(options), new' + + 'PluggableAuthClientOptions, or via ' + + 'new GoogleAuth(options).getClient()'); + } + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. 
+ * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options, additionalOptions) { + var _a, _b; + if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { + return new awsclient_1.AwsClient(options, additionalOptions); + } + else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) { + return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + } + else { + return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + } + } + else { + return null; + } + } +} +exports.ExternalAccountClient = ExternalAccountClient; /***/ }), -/***/ 32964: -/***/ ((__unused_webpack_module, exports) => { +/***/ 27646: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var _a, _b, _c; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.LazyJsonString = exports.StringWrapper = void 0; -const StringWrapper = function () { - const Class = Object.getPrototypeOf(this).constructor; - const Constructor = Function.bind.apply(String, [null, ...arguments]); - const instance = new Constructor(); - Object.setPrototypeOf(instance, Class.prototype); - return instance; -}; -exports.StringWrapper = StringWrapper; -exports.StringWrapper.prototype = Object.create(String.prototype, { - constructor: { - value: exports.StringWrapper, - enumerable: false, - writable: true, - configurable: true, - }, -}); -Object.setPrototypeOf(exports.StringWrapper, String); -class LazyJsonString extends exports.StringWrapper { - deserializeJSON() { - return JSON.parse(super.toString()); - } - toJSON() { - return super.toString(); +exports.FileSubjectTokenSupplier = void 0; +const util_1 = __nccwpck_require__(73837); +const fs = __nccwpck_require__(57147); +// fs.readfile is undefined in browser karma tests causing +// `npm run browser-test` to fail as test.oauth2.ts imports this file via +// src/index.ts. +// Fallback to void function to avoid promisify throwing a TypeError. +const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); +const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); +/** + * Internal subject token supplier implementation used when a file location + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class FileSubjectTokenSupplier { + /** + * Instantiates a new file based subject token supplier. 
+ * @param opts The file subject token supplier options to build the supplier + * with. + */ + constructor(opts) { + this.filePath = opts.filePath; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; } - static fromObject(object) { - if (object instanceof LazyJsonString) { - return object; + /** + * Returns the subject token stored at the file specified in the constructor. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + let parsedFilePath = this.filePath; + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + parsedFilePath = await realpath(parsedFilePath); + if (!(await lstat(parsedFilePath)).isFile()) { + throw new Error(); + } } - else if (object instanceof String || typeof object === "string") { - return new LazyJsonString(object); + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${parsedFilePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; } - return new LazyJsonString(JSON.stringify(object)); + let subjectToken; + const rawText = await readFile(parsedFilePath, { encoding: 'utf8' }); + if (this.formatType === 'text') { + subjectToken = rawText; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); + } + return subjectToken; } } -exports.LazyJsonString = LazyJsonString; +exports.FileSubjectTokenSupplier = FileSubjectTokenSupplier; /***/ }), -/***/ 83495: -/***/ ((__unused_webpack_module, exports) => { +/***/ 20695: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleAuth_instances, _GoogleAuth_pendingAuthClient, _GoogleAuth_prepareAndCacheClient, _GoogleAuth_determineClient; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.take = exports.convertMap = exports.map = void 0; -function map(arg0, arg1, arg2) { - let target; - let filter; - let instructions; - if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { - target = {}; - instructions = arg0; - } - else { - target = arg0; - if (typeof arg1 === "function") { - filter = arg1; - instructions = arg2; - return mapWithFilter(target, filter, instructions); +exports.GoogleAuth = exports.GoogleAuthExceptionMessages = exports.CLOUD_SDK_CLIENT_ID = void 0; +const child_process_1 = __nccwpck_require__(32081); +const fs = __nccwpck_require__(57147); +const gcpMetadata = __nccwpck_require__(3563); +const os = __nccwpck_require__(22037); +const path = __nccwpck_require__(71017); +const crypto_1 = __nccwpck_require__(78043); +const transporters_1 = __nccwpck_require__(72649); +const computeclient_1 = __nccwpck_require__(96875); +const idtokenclient_1 = __nccwpck_require__(80298); +const envDetect_1 = __nccwpck_require__(21380); +const jwtclient_1 = __nccwpck_require__(13959); +const refreshclient_1 = __nccwpck_require__(98790); +const impersonated_1 = __nccwpck_require__(91103); +const externalclient_1 = __nccwpck_require__(94381); +const baseexternalclient_1 = __nccwpck_require__(40810); +const authclient_1 = __nccwpck_require__(44627); +const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(38765); +const util_1 = __nccwpck_require__(68905); +exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; +exports.GoogleAuthExceptionMessages = { + API_KEY_WITH_CREDENTIALS: 'API Keys and Credentials are mutually exclusive authentication methods and cannot be used together.', + NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_ADC_FOUND: 'Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.', + NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + + 'To learn more about Universe Domain retrieval, visit: \n' + + 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', +}; +class GoogleAuth { + // Note: this properly is only public to satisfy unit tests. + // https://github.com/Microsoft/TypeScript/issues/5228 + get isGCE() { + return this.checkIsGCE; + } + /** + * Configuration is resolved in the following order of precedence: + * - {@link GoogleAuthOptions.credentials `credentials`} + * - {@link GoogleAuthOptions.keyFilename `keyFilename`} + * - {@link GoogleAuthOptions.keyFile `keyFile`} + * + * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the + * {@link AuthClient `AuthClient`s}. 
+ * + * @param opts + */ + constructor(opts = {}) { + _GoogleAuth_instances.add(this); + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + /** + * A pending {@link AuthClient}. Used for concurrent {@link GoogleAuth.getClient} calls. + */ + _GoogleAuth_pendingAuthClient.set(this, null); + this.clientOptions = {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.clientOptions = opts.clientOptions || {}; + this.jsonContent = opts.credentials || null; + this.apiKey = opts.apiKey || this.clientOptions.apiKey || null; + // Cannot use both API Key + Credentials + if (this.apiKey && (this.jsonContent || this.clientOptions.credentials)) { + throw new RangeError(exports.GoogleAuthExceptionMessages.API_KEY_WITH_CREDENTIALS); + } + if (opts.universeDomain) { + this.clientOptions.universeDomain = opts.universeDomain; + } + } + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; + } + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); } else { - instructions = arg1; + return this.getProjectIdAsync(); } } - for (const key of Object.keys(instructions)) { - if (!Array.isArray(instructions[key])) { - target[key] = instructions[key]; - continue; + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } } - applyInstruction(target, null, instructions, key); } - return target; -} -exports.map = map; -const convertMap = (target) => { - const output = {}; - for (const [k, v] of Object.entries(target || {})) { - output[k] = [, v]; + /** + * A private method for finding and caching a projectId. 
+ * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; + } + else { + throw new Error(exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); + } } - return output; -}; -exports.convertMap = convertMap; -const take = (source, instructions) => { - const out = {}; - for (const key in instructions) { - applyInstruction(out, source, instructions, key); + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; } - return out; -}; -exports.take = take; -const mapWithFilter = (target, filter, instructions) => { - return map(target, Object.entries(instructions).reduce((_instructions, [key, value]) => { - if (Array.isArray(value)) { - _instructions[key] = value; + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + async getUniverseDomainFromMetadataServer() { + var _a; + let universeDomain; + try { + universeDomain = await gcpMetadata.universe('universe-domain'); + universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); } - else { - if (typeof value === "function") { - _instructions[key] = [filter, value()]; + catch (e) { + if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { + universeDomain = authclient_1.DEFAULT_UNIVERSE; } else { - _instructions[key] = [filter, value]; + throw e; } } - return _instructions; - }, {})); -}; -const applyInstruction = (target, source, instructions, targetKey) => { - if (source !== null) { - let instruction = instructions[targetKey]; - if (typeof instruction === "function") { - instruction = [, instruction]; + return universeDomain; + } + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + async getUniverseDomain() { + let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); + try { + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); } - const [filter = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; - if ((typeof filter === "function" && filter(source[sourceKey])) || (typeof filter !== "function" && !!filter)) { - target[targetKey] = valueFn(source[sourceKey]); + catch (_a) { + // client or ADC is not available + universeDomain !== null && universeDomain !== void 0 ? 
universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); } - return; + return universeDomain; } - let [filter, value] = instructions[targetKey]; - if (typeof value === "function") { - let _value; - const defaultFilterPassed = filter === undefined && (_value = value()) != null; - const customFilterPassed = (typeof filter === "function" && !!filter(void 0)) || (typeof filter !== "function" && !!filter); - if (defaultFilterPassed) { - target[targetKey] = _value; + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; } - else if (customFilterPassed) { - target[targetKey] = value(); + else { + options = optionsOrCallback; } - } - else { - const defaultFilterPassed = filter === undefined && value != null; - const customFilterPassed = (typeof filter === "function" && !!filter(value)) || (typeof filter !== "function" && !!filter); - if (defaultFilterPassed || customFilterPassed) { - target[targetKey] = value; + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); } } -}; -const nonNullish = (_) => _ != null; -const pass = (_) => _; - - -/***/ }), - -/***/ 74857: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.logger = exports.strictParseByte = exports.strictParseShort = exports.strictParseInt32 = exports.strictParseInt = exports.strictParseLong = exports.limitedParseFloat32 = exports.limitedParseFloat = exports.handleFloat = exports.limitedParseDouble = exports.strictParseFloat32 = exports.strictParseFloat = exports.strictParseDouble = exports.expectUnion = exports.expectString = exports.expectObject = exports.expectNonNull = exports.expectByte = exports.expectShort = exports.expectInt32 = exports.expectInt = exports.expectLong = exports.expectFloat32 = exports.expectNumber = exports.expectBoolean = exports.parseBoolean = void 0; -const parseBoolean = (value) => { - switch (value) { - case "true": - return true; - case "false": - return false; - default: - throw new Error(`Unable to parse boolean value "${value}"`); + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. + // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. + if (this.cachedCredential) { + // cache, while preserving existing quota project preferences + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, this.cachedCredential, null); + } + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. 
+ credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Look in the well-known credential file location. + credential = + await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Determine if we're running on GCE. + if (await this._checkIsGCE()) { + options.scopes = this.getAnyScopes(); + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, new computeclient_1.Compute(options)); + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_ADC_FOUND); } -}; -exports.parseBoolean = parseBoolean; -const expectBoolean = (value) => { - if (value === null || value === undefined) { - return undefined; + /** + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. + * + * @returns A promise that resolves with the boolean. + * @api private + */ + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = + gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); + } + return this.checkIsGCE; } - if (typeof value === "number") { - if (value === 0 || value === 1) { - exports.logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; } - if (value === 0) { - return false; + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); } - if (value === 1) { - return true; + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; + } + throw e; } } - if (typeof value === "string") { - const lower = value.toLowerCase(); - if (lower === "false" || lower === "true") { - exports.logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. 
+ let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; } - if (lower === "false") { - return false; + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } } - if (lower === "true") { - return true; + // If we found the root path, expand it. + if (location) { + location = path.join(location, 'gcloud', 'application_default_credentials.json'); + if (!fs.existsSync(location)) { + location = null; + } } + // The file does not exist. + if (!location) { + return null; + } + // The file seems to exist. Try to use it. + const client = await this._getApplicationCredentialsFromFilePath(location, options); + return client; } - if (typeof value === "boolean") { - return value; - } - throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); -}; -exports.expectBoolean = expectBoolean; -const expectNumber = (value) => { - if (value === null || value === undefined) { - return undefined; - } - if (typeof value === "string") { - const parsed = parseFloat(value); - if (!Number.isNaN(parsed)) { - if (String(parsed) !== String(value)) { - exports.logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + /** + * Attempts to load default credentials from a file at the given path.. + * @param filePath The path to the file to read. + * @returns Promise that resolves with the OAuth2Client + * @api private + */ + async _getApplicationCredentialsFromFilePath(filePath, options = {}) { + // Make sure the path looks like a string. + if (!filePath || filePath.length === 0) { + throw new Error('The file path is invalid.'); + } + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = fs.realpathSync(filePath); + if (!fs.lstatSync(filePath).isFile()) { + throw new Error(); } - return parsed; } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + // Now open a read stream on the file, and parse it. + const readStream = fs.createReadStream(filePath); + return this.fromStream(readStream, options); } - if (typeof value === "number") { - return value; + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. + * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json) { + var _a, _b, _c, _d; + if (!json) { + throw new Error('Must pass in a JSON object containing an impersonated refresh token'); + } + if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + } + if (!json.source_credentials) { + throw new Error('The incoming JSON object does not contain a source_credentials field'); + } + if (!json.service_account_impersonation_url) { + throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + } + const sourceClient = this.fromJSON(json.source_credentials); + if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) { + /** + * Prevents DOS attacks. 
+ * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85} + **/ + throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`); + } + // Extract service account from service_account_impersonation_url + const targetPrincipal = (_c = (_b = /(?[^/]+):(generateAccessToken|generateIdToken)$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target; + if (!targetPrincipal) { + throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + } + const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : []; + return new impersonated_1.Impersonated({ + ...json, + sourceClient, + targetPrincipal, + targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], + }); } - throw new TypeError(`Expected number, got ${typeof value}: ${value}`); -}; -exports.expectNumber = expectNumber; -const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); -const expectFloat32 = (value) => { - const expected = (0, exports.expectNumber)(value); - if (expected !== undefined && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { - if (Math.abs(expected) > MAX_FLOAT) { - throw new TypeError(`Expected 32-bit float, got ${value}`); + /** + * Create a credentials instance using the given input options. + * This client is not cached. + * + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + // user's preferred universe domain + const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + if (preferredUniverseDomain) { + client.universeDomain = preferredUniverseDomain; + } + return client; } - return expected; -}; -exports.expectFloat32 = expectFloat32; -const expectLong = (value) => { - if (value === null || value === undefined) { - return undefined; + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. 
+ this.jsonContent = json; + this.cachedCredential = client; + return client; } - if (Number.isInteger(value) && !Number.isNaN(value)) { - return value; + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + } + else { + return this.fromStreamAsync(inputStream, options); + } } - throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); -}; -exports.expectLong = expectLong; -exports.expectInt = exports.expectLong; -const expectInt32 = (value) => expectSizedInt(value, 32); -exports.expectInt32 = expectInt32; -const expectShort = (value) => expectSizedInt(value, 16); -exports.expectShort = expectShort; -const expectByte = (value) => expectSizedInt(value, 8); -exports.expectByte = expectByte; -const expectSizedInt = (value, size) => { - const expected = (0, exports.expectLong)(value); - if (expected !== undefined && castInt(expected, size) !== expected) { - throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); + } + const chunks = []; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => chunks.push(chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(chunks.join('')); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); } - return expected; -}; -const castInt = (value, size) => { - switch (size) { - case 32: - return Int32Array.of(value)[0]; - case 16: - return Int16Array.of(value)[0]; - case 8: - return Int8Array.of(value)[0]; + /** + * Create a credentials instance using the given API key string. + * The created client is not cached. In order to create and cache it use the {@link GoogleAuth.getClient `getClient`} method after first providing an {@link GoogleAuth.apiKey `apiKey`}. + * + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options = {}) { + return new jwtclient_1.JWT({ ...options, apiKey }); } -}; -const expectNonNull = (value, location) => { - if (value === null || value === undefined) { - if (location) { - throw new TypeError(`Expected a non-null value for ${location}`); + /** + * Determines whether the current operating system is Windows. 
+ * @api private + */ + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; + } } - throw new TypeError("Expected a non-null value"); + return false; } - return value; -}; -exports.expectNonNull = expectNonNull; -const expectObject = (value) => { - if (value === null || value === undefined) { - return undefined; + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); } - if (typeof value === "object" && !Array.isArray(value)) { - return value; + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); } - const receivedType = Array.isArray(value) ? "array" : typeof value; - throw new TypeError(`Expected object, got ${receivedType}: ${value}`); -}; -exports.expectObject = expectObject; -const expectString = (value) => { - if (value === null || value === undefined) { - return undefined; + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; + } + // Ensure the projectId is loaded from the keyFile if available. + if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; + } + } + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; + } + else { + return null; + } } - if (typeof value === "string") { - return value; + /** + * Gets the project ID from external account client if available. + */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; + } + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); } - if (["boolean", "number", "bigint"].includes(typeof value)) { - exports.logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); - return String(value); + /** + * Gets the Compute Engine project ID if it can be inferred. 
+ */ + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; + } + catch (e) { + // Ignore any errors + return null; + } } - throw new TypeError(`Expected string, got ${typeof value}: ${value}`); -}; -exports.expectString = expectString; -const expectUnion = (value) => { - if (value === null || value === undefined) { - return undefined; + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); + } + else { + return this.getCredentialsAsync(); + } } - const asObject = (0, exports.expectObject)(value); - const setKeys = Object.entries(asObject) - .filter(([, v]) => v != null) - .map(([k]) => k); - if (setKeys.length === 0) { - throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof impersonated_1.Impersonated) { + return { client_email: client.getTargetPrincipal() }; + } + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { + client_email: serviceAccountEmail, + universe_domain: client.universeDomain, + }; + } + } + if (this.jsonContent) { + return { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + universe_domain: this.jsonContent.universe_domain, + }; + } + if (await this._checkIsGCE()) { + const [client_email, universe_domain] = await Promise.all([ + gcpMetadata.instance('service-accounts/default/email'), + this.getUniverseDomain(), + ]); + return { client_email, universe_domain }; + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); } - if (setKeys.length > 1) { - throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. + */ + async getClient() { + if (this.cachedCredential) { + return this.cachedCredential; + } + // Use an existing auth client request, or cache a new one + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f") || __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_determineClient).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f"); + } + finally { + // reset the pending auth client in case it is changed later + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, null, "f"); + } } - return asObject; -}; -exports.expectUnion = expectUnion; -const strictParseDouble = (value) => { - if (typeof value == "string") { - return (0, exports.expectNumber)(parseNumber(value)); + /** + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. 
+ */ + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + } + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); } - return (0, exports.expectNumber)(value); -}; -exports.strictParseDouble = strictParseDouble; -exports.strictParseFloat = exports.strictParseDouble; -const strictParseFloat32 = (value) => { - if (typeof value == "string") { - return (0, exports.expectFloat32)(parseNumber(value)); + /** + * Automatically obtain application default credentials, and return + * an access token for making requests. + */ + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; } - return (0, exports.expectFloat32)(value); -}; -exports.strictParseFloat32 = strictParseFloat32; -const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; -const parseNumber = (value) => { - const matches = value.match(NUMBER_REGEX); - if (matches === null || matches[0].length !== value.length) { - throw new TypeError(`Expected real number, got implicit NaN`); + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. + */ + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); } - return parseFloat(value); -}; -const limitedParseDouble = (value) => { - if (typeof value == "string") { - return parseFloatString(value); + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; } - return (0, exports.expectNumber)(value); -}; -exports.limitedParseDouble = limitedParseDouble; -exports.handleFloat = exports.limitedParseDouble; -exports.limitedParseFloat = exports.limitedParseDouble; -const limitedParseFloat32 = (value) => { - if (typeof value == "string") { - return parseFloatString(value); + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); + } + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); + } + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. 
+ * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` + */ + async sign(data, endpoint) { + const client = await this.getClient(); + const universe = await this.getUniverseDomain(); + endpoint = + endpoint || + `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; + if (client instanceof impersonated_1.Impersonated) { + const signed = await client.sign(data); + return signed.signedBlob; + } + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); + } + return this.signBlob(crypto, creds.client_email, data, endpoint); + } + async signBlob(crypto, emailOrUniqueId, data, endpoint) { + const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); + const res = await this.request({ + method: 'POST', + url: url.href, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + retry: true, + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + return res.data.signedBlob; } - return (0, exports.expectFloat32)(value); -}; -exports.limitedParseFloat32 = limitedParseFloat32; -const parseFloatString = (value) => { - switch (value) { - case "NaN": - return NaN; - case "Infinity": - return Infinity; - case "-Infinity": - return -Infinity; - default: - throw new Error(`Unable to parse float value: ${value}`); +} +exports.GoogleAuth = GoogleAuth; +_GoogleAuth_pendingAuthClient = new WeakMap(), _GoogleAuth_instances = new WeakSet(), _GoogleAuth_prepareAndCacheClient = async function _GoogleAuth_prepareAndCacheClient(credential, quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'] || null) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; } -}; -const strictParseLong = (value) => { - if (typeof value === "string") { - return (0, exports.expectLong)(parseNumber(value)); + this.cachedCredential = credential; + return { credential, projectId }; +}, _GoogleAuth_determineClient = async function _GoogleAuth_determineClient() { + if (this.jsonContent) { + return this._cacheClientFromJSON(this.jsonContent, this.clientOptions); } - return (0, exports.expectLong)(value); -}; -exports.strictParseLong = strictParseLong; -exports.strictParseInt = exports.strictParseLong; -const strictParseInt32 = (value) => { - if (typeof value === "string") { - return (0, exports.expectInt32)(parseNumber(value)); + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + return await this.fromStreamAsync(stream, this.clientOptions); } - return (0, exports.expectInt32)(value); -}; -exports.strictParseInt32 = strictParseInt32; -const strictParseShort = (value) => { - if (typeof value === "string") { - return (0, exports.expectShort)(parseNumber(value)); + else if (this.apiKey) { + const client = await this.fromAPIKey(this.apiKey, this.clientOptions); + client.scopes = this.scopes; + const { credential } = await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, client); + return credential; } - return (0, exports.expectShort)(value); -}; -exports.strictParseShort = strictParseShort; -const strictParseByte = (value) => { - if (typeof value === "string") { - return 
(0, exports.expectByte)(parseNumber(value)); + else { + const { credential } = await this.getApplicationDefaultAsync(this.clientOptions); + return credential; } - return (0, exports.expectByte)(value); -}; -exports.strictParseByte = strictParseByte; -const stackTraceWarning = (message) => { - return String(new TypeError(message).stack || message) - .split("\n") - .slice(0, 5) - .filter((s) => !s.includes("stackTraceWarning")) - .join("\n"); -}; -exports.logger = { - warn: console.warn, }; +/** + * Export DefaultTransporter as a static property of the class. + */ +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; /***/ }), -/***/ 15342: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 39735: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolvedPath = void 0; -const extended_encode_uri_component_1 = __nccwpck_require__(76016); -const resolvedPath = (resolvedPath, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { - if (input != null && input[memberName] !== undefined) { - const labelValue = labelValueProvider(); - if (labelValue.length <= 0) { - throw new Error("Empty value provided for input HTTP label: " + memberName + "."); - } - resolvedPath = resolvedPath.replace(uriLabel, isGreedyLabel - ? labelValue - .split("/") - .map((segment) => (0, extended_encode_uri_component_1.extendedEncodeURIComponent)(segment)) - .join("/") - : (0, extended_encode_uri_component_1.extendedEncodeURIComponent)(labelValue)); +exports.IAMAuth = void 0; +class IAMAuth { + /** + * IAM credentials. + * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; } - else { - throw new Error("No value provided for input HTTP label: " + memberName + "."); + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; } - return resolvedPath; -}; -exports.resolvedPath = resolvedPath; +} +exports.IAMAuth = IAMAuth; /***/ }), -/***/ 53456: -/***/ ((__unused_webpack_module, exports) => { +/***/ 20117: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.serializeFloat = void 0; -const serializeFloat = (value) => { - if (value !== value) { - return "NaN"; +exports.IdentityPoolClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(40810); +const util_1 = __nccwpck_require__(68905); +const filesubjecttokensupplier_1 = __nccwpck_require__(27646); +const urlsubjecttokensupplier_1 = __nccwpck_require__(7428); +/** + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. + */ +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const subjectTokenSupplier = opts.get('subject_token_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !subjectTokenSupplier) { + throw new Error('A credential source or subject token supplier must be specified.'); + } + if (credentialSource && subjectTokenSupplier) { + throw new Error('Only one of credential source or subject token supplier can be specified.'); + } + if (subjectTokenSupplier) { + this.subjectTokenSupplier = subjectTokenSupplier; + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + const formatOpts = (0, util_1.originalOrCamelOptions)(credentialSourceOpts.get('format')); + // Text is the default format type. 
+ const formatType = formatOpts.get('type') || 'text'; + const formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); + if (formatType !== 'json' && formatType !== 'text') { + throw new Error(`Invalid credential_source format "${formatType}"`); + } + if (formatType === 'json' && !formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); + } + const file = credentialSourceOpts.get('file'); + const url = credentialSourceOpts.get('url'); + const headers = credentialSourceOpts.get('headers'); + if (file && url) { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + else if (file && !url) { + this.credentialSourceType = 'file'; + this.subjectTokenSupplier = new filesubjecttokensupplier_1.FileSubjectTokenSupplier({ + filePath: file, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + }); + } + else if (!file && url) { + this.credentialSourceType = 'url'; + this.subjectTokenSupplier = new urlsubjecttokensupplier_1.UrlSubjectTokenSupplier({ + url: url, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + headers: headers, + additionalGaxiosOptions: IdentityPoolClient.RETRY_CONFIG, + }); + } + else { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + } } - switch (value) { - case Infinity: - return "Infinity"; - case -Infinity: - return "-Infinity"; - default: - return value; + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. Gets a subject token by calling + * the configured {@link SubjectTokenSupplier} + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + return this.subjectTokenSupplier.getSubjectToken(this.supplierContext); } -}; -exports.serializeFloat = serializeFloat; +} +exports.IdentityPoolClient = IdentityPoolClient; /***/ }), -/***/ 1752: -/***/ ((__unused_webpack_module, exports) => { +/***/ 80298: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports._json = void 0; -const _json = (obj) => { - if (obj == null) { - return {}; - } - if (Array.isArray(obj)) { - return obj.filter((_) => _ != null).map(exports._json); +exports.IdTokenClient = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +class IdTokenClient extends oauth2client_1.OAuth2Client { + /** + * Google ID Token client + * + * Retrieve ID token from the metadata server. 
+ * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server + */ + constructor(options) { + super(options); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + !this.credentials.expiry_date || + this.isTokenExpiring()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; + } + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; } - if (typeof obj === "object") { - const target = {}; - for (const key of Object.keys(obj)) { - if (obj[key] == null) { - continue; - } - target[key] = (0, exports._json)(obj[key]); + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; } - return target; } - return obj; -}; -exports._json = _json; +} +exports.IdTokenClient = IdTokenClient; /***/ }), -/***/ 92480: -/***/ ((__unused_webpack_module, exports) => { +/***/ 91103: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.splitEvery = void 0; -function splitEvery(value, delimiter, numDelimiters) { - if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { - throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +const gaxios_1 = __nccwpck_require__(59555); +const util_1 = __nccwpck_require__(68905); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { + /** + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. 
If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. + * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + const usingExplicitUniverseDomain = !!(0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (!usingExplicitUniverseDomain) { + // override the default universe with the source's universe + this.universeDomain = this.sourceClient.universeDomain; + } + else if (this.sourceClient.universeDomain !== this.universeDomain) { + // non-default universe and is not matching the source - this could be a credential leak + throw new RangeError(`Universe domain ${this.sourceClient.universeDomain} in source credentials does not match ${this.universeDomain} universe domain set for impersonated credentials.`); + } + this.endpoint = + (_f = options.endpoint) !== null && _f !== void 0 ? _f : `https://iamcredentials.${this.universeDomain}`; + } + /** + * Signs some bytes. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. + * + * @returns A {@link SignBlobResponse} denoting the keyID and signedBlob in base64 string + */ + async sign(blobToSign) { + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:signBlob`; + const body = { + delegates: this.delegates, + payload: Buffer.from(blobToSign).toString('base64'), + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data; } - const segments = value.split(delimiter); - if (numDelimiters === 1) { - return segments; + /** The service account email to be impersonated. 
*/ + getTargetPrincipal() { + return this.targetPrincipal; } - const compoundSegments = []; - let currentSegment = ""; - for (let i = 0; i < segments.length; i++) { - if (currentSegment === "") { - currentSegment = segments[i]; - } - else { - currentSegment += delimiter + segments[i]; + /** + * Refreshes the access token. + */ + async refreshToken() { + var _a, _b, _c, _d, _e, _f; + try { + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, + }; } - if ((i + 1) % numDelimiters === 0) { - compoundSegments.push(currentSegment); - currentSegment = ""; + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; + } + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; + } + else { + error.message = `unable to impersonate: ${error}`; + throw error; + } } } - if (currentSegment !== "") { - compoundSegments.push(currentSegment); + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a, _b; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, + useEmailAzp: (_b = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _b !== void 0 ? 
_b : true, + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data.token; } - return compoundSegments; } -exports.splitEvery = splitEvery; - - -/***/ }), - -/***/ 74075: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 93242: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpApiKeyAuthLocation = void 0; -var HttpApiKeyAuthLocation; -(function (HttpApiKeyAuthLocation) { - HttpApiKeyAuthLocation["HEADER"] = "header"; - HttpApiKeyAuthLocation["QUERY"] = "query"; -})(HttpApiKeyAuthLocation = exports.HttpApiKeyAuthLocation || (exports.HttpApiKeyAuthLocation = {})); - - -/***/ }), - -/***/ 81851: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 91530: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 74020: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 52263: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 79467: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpAuthLocation = void 0; -var HttpAuthLocation; -(function (HttpAuthLocation) { - HttpAuthLocation["HEADER"] = "header"; - HttpAuthLocation["QUERY"] = "query"; -})(HttpAuthLocation = exports.HttpAuthLocation || (exports.HttpAuthLocation = {})); +exports.Impersonated = Impersonated; /***/ }), -/***/ 11239: +/***/ 68740: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(79467), exports); -tslib_1.__exportStar(__nccwpck_require__(93242), exports); -tslib_1.__exportStar(__nccwpck_require__(81851), exports); -tslib_1.__exportStar(__nccwpck_require__(91530), exports); -tslib_1.__exportStar(__nccwpck_require__(74020), exports); -tslib_1.__exportStar(__nccwpck_require__(52263), exports); - - -/***/ }), - -/***/ 63274: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 78340: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 4744: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 68270: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 39580: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWTAccess = void 0; +const jws = __nccwpck_require__(4636); +const util_1 = __nccwpck_require__(68905); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { + /** + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. + */ + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new util_1.LRUCache({ + capacity: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; + } + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; + } + else if (typeof scopes === 'string') { + cacheKey = url ? `${url}_${scopes}` : scopes; + } + if (!cacheKey) { + throw Error('Scopes or url must be provided'); + } + return cacheKey; + } + /** + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. 
+ */ + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; + } + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); + } + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); + } + } + } + const header = this.keyId + ? { ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, + }); + return headers; + } + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; + } + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. 
+ this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + } +} +exports.JWTAccess = JWTAccess; /***/ }), -/***/ 57628: +/***/ 13959: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(39580), exports); -tslib_1.__exportStar(__nccwpck_require__(98398), exports); -tslib_1.__exportStar(__nccwpck_require__(76522), exports); - - -/***/ }), - -/***/ 98398: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWT = void 0; +const gtoken_1 = __nccwpck_require__(76031); +const jwtaccess_1 = __nccwpck_require__(68740); +const oauth2client_1 = __nccwpck_require__(3936); +const authclient_1 = __nccwpck_require__(44627); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super(opts); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } + /** + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. + */ + createScoped(scopes) { + const jwt = new JWT(this); + jwt.scopes = scopes; + return jwt; + } + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? 
`https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()) || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { + throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); + } + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const useScopes = this.useJWTAccessWithScope || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedent over audience for signing, + // so we only provide them if `useJWTAccessWithScope` is on or + // if we are in a non-default universe + useScopes ? scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, + }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); + } + return gtoken.idToken; + } + /** + * Determine if there are currently scopes available. + */ + hasUserScopes() { + if (!this.scopes) { + return false; + } + return this.scopes.length > 0; + } + /** + * Are there any default or user scopes defined. + */ + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; + } + authorize(callback) { + if (callback) { + this.authorizeAsync().then(r => callback(null, r), callback); + } + else { + return this.authorizeAsync(); + } + } + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); + } + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; + } + /** + * Refreshes the access token. 
+ * @param refreshToken ignored + * @private + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), + }); + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, + }; + this.emit('tokens', tokens); + return { res: null, tokens }; + } + /** + * Create a gToken if it doesn't already exist. + */ + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); + } + return this.gtoken; + } + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); + } + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; + } + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); + } +} +exports.JWT = JWT; /***/ }), -/***/ 76522: +/***/ 74524: /***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LoginTicket = void 0; +class LoginTicket { + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; + } + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; + } + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. + * + * @return The envelope and payload + */ + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; + } +} +exports.LoginTicket = LoginTicket; /***/ }), -/***/ 89035: -/***/ ((__unused_webpack_module, exports) => { +/***/ 3936: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuth2Client = exports.ClientAuthentication = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const querystring = __nccwpck_require__(63477); +const stream = __nccwpck_require__(12781); +const formatEcdsa = __nccwpck_require__(11728); +const crypto_1 = __nccwpck_require__(78043); +const authclient_1 = __nccwpck_require__(44627); +const loginticket_1 = __nccwpck_require__(74524); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); +/** + * The client authentication type. Supported values are basic, post, and none. 
+ * https://datatracker.ietf.org/doc/html/rfc7591#section-2 + */ +var ClientAuthentication; +(function (ClientAuthentication) { + ClientAuthentication["ClientSecretPost"] = "ClientSecretPost"; + ClientAuthentication["ClientSecretBasic"] = "ClientSecretBasic"; + ClientAuthentication["None"] = "None"; +})(ClientAuthentication || (exports.ClientAuthentication = ClientAuthentication = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + super(opts); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.endpoints = { + tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', + oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', + oauth2TokenUrl: 'https://oauth2.googleapis.com/token', + oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', + oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', + oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', + oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', + ...opts.endpoints, + }; + this.clientAuthentication = + opts.clientAuthentication || ClientAuthentication.ClientSecretPost; + this.issuers = opts.issuers || [ + 'accounts.google.com', + 'https://accounts.google.com', + this.universeDomain, + ]; + } + /** + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. + */ + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); + } + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); + } + const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. 
+ const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? { code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); + } + else { + return this.getTokenAsync(options); + } + } + async getTokenAsync(options) { + const url = this.endpoints.oauth2TokenUrl.toString(); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + const values = { + client_id: options.client_id || this._clientId, + code_verifier: options.codeVerifier, + code: options.code, + grant_type: 'authorization_code', + redirect_uri: options.redirect_uri || this.redirectUri, + }; + if (this.clientAuthentication === ClientAuthentication.ClientSecretBasic) { + const basic = Buffer.from(`${this._clientId}:${this._clientSecret}`); + headers['Authorization'] = `Basic ${basic.toString('base64')}`; + } + if (this.clientAuthentication === ClientAuthentication.ClientSecretPost) { + values.client_secret = this._clientSecret; + } + const res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(values), + headers, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private + */ + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. + if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; + } + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); + } + const url = this.endpoints.oauth2TokenUrl.toString(); + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + }; + let res; + try { + // request for new token + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? 
void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized + */ + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; + try { + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; + } + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; + } + throw e; + } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; + const headers = { + Authorization: credentials.token_type + ' ' + tokens.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; + } + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} + */ + static getRevokeTokenUrl(token) { + return new OAuth2Client().getRevokeTokenURL(token).toString(); + } + /** + * Generates a URL to revoke the given token. + * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token) { + const url = new URL(this.endpoints.oauth2RevokeUrl); + url.searchParams.append('token', token); + return url; + } + revokeToken(token, callback) { + const opts = { + ...OAuth2Client.RETRY_CONFIG, + url: this.getRevokeTokenURL(token).toString(), + method: 'POST', + }; + if (callback) { + this.transporter + .request(opts) + .then(r => callback(null, r), callback); + } + else { + return this.transporter.request(opts); + } + } + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); + } + } + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + async requestAsync(opts, reAuthRetried = false) { + let r2; + try { + const r = await this.getRequestMetadataAsync(opts.url); + opts.headers = opts.headers || {}; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; + } + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; + } + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. 
+ // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. + const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefresh) { + await this.refreshAccessTokenAsync(); + return this.requestAsync(opts, true); + } + else if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); + } + } + throw e; + } + return r2; + } + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); + } + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); + } + else { + return this.verifyIdTokenAsync(options); + } + } + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); + } + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); + return login; + } + /** + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. 
+ */ + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, + }, + url: this.endpoints.tokenInfoUrl.toString(), + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; + } + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); + } + else { + return this.getFederatedSignonCertsAsync(); + } + } + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; + } + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); + break; + case CertificateFormat.JWK: + url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds + } + } + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; + } + getIapPublicKeys(callback) { + if (callback) { + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); + } + else { + return this.getIapPublicKeysAsync(); + } + } + async getIapPublicKeysAsync() { + let res; + const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + return { pubkeys: res.data, res }; + } + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + } + /** + * Verify the id token is signed with the correct certificate + * and is from the correct audience. 
+ * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. + */ + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; + } + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); + } + const signed = segments[0] + '.' + segments[1]; + let signature = segments[2]; + let envelope; + let payload; + try { + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; + } + throw err; + } + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); + } + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token payload '${segments[0]}`; + } + throw err; + } + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); + } + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); + } + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); + } + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; + } + else { + audVerified = aud === requiredAudience; + } + if 
(!audVerified) { + throw new Error('Wrong recipient, payload audience != requiredAudience'); + } + } + return new loginticket_1.LoginTicket(envelope, payload); + } + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; + } +} +exports.OAuth2Client = OAuth2Client; +/** + * @deprecated use instance's {@link OAuth2Client.endpoints} + */ +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; +/** + * Clock skew - five minutes in seconds + */ +OAuth2Client.CLOCK_SKEW_SECS_ = 300; +/** + * The default max Token Lifetime is one day in seconds + */ +OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; /***/ }), -/***/ 7225: -/***/ ((__unused_webpack_module, exports) => { +/***/ 19510: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuthClientAuthHandler = void 0; +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; +const querystring = __nccwpck_require__(63477); +const crypto_1 = __nccwpck_require__(78043); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +class OAuthClientAuthHandler { + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. + */ + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. 
+ * When this is used, no client authentication credentials are needed. + */ + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); + } + } + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + injectAuthenticatedHeaders(opts, bearerToken) { + var _a; + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}}`, + }); + } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); + } + } + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type. + let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + else { + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. 
+ */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; + } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); + } + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); + } + }); + } + return newError; +} /***/ }), -/***/ 54126: -/***/ ((__unused_webpack_module, exports) => { +/***/ 32460: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.EndpointURLScheme = void 0; -var EndpointURLScheme; -(function (EndpointURLScheme) { - EndpointURLScheme["HTTP"] = "http"; - EndpointURLScheme["HTTPS"] = "https"; -})(EndpointURLScheme = exports.EndpointURLScheme || (exports.EndpointURLScheme = {})); +exports.PassThroughClient = void 0; +const authclient_1 = __nccwpck_require__(44627); +/** + * An AuthClient without any Authentication information. Useful for: + * - Anonymous access + * - Local Emulators + * - Testing Environments + * + */ +class PassThroughClient extends authclient_1.AuthClient { + /** + * Creates a request without any authentication headers or checks. + * + * @remarks + * + * In testing environments it may be useful to change the provided + * {@link AuthClient.transporter} for any desired request overrides/handling. + * + * @param opts + * @returns The response of the request. + */ + async request(opts) { + return this.transporter.request(opts); + } + /** + * A required method of the base class. + * Always will return an empty object. + * + * @returns {} + */ + async getAccessToken() { + return {}; + } + /** + * A required method of the base class. + * Always will return an empty object. 
+ * + * @returns {} + */ + async getRequestHeaders() { + return {}; + } +} +exports.PassThroughClient = PassThroughClient; +const a = new PassThroughClient(); +a.getAccessToken(); /***/ }), -/***/ 55612: -/***/ ((__unused_webpack_module, exports) => { +/***/ 44782: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthClient = exports.ExecutableError = void 0; +const baseexternalclient_1 = __nccwpck_require__(40810); +const executable_response_1 = __nccwpck_require__(8749); +const pluggable_auth_handler_1 = __nccwpck_require__(18941); +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. + */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *
+ * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
+ * must be set to '1'. This is for security reasons.
+ *
+ * Both OIDC and SAML are supported. The executable must adhere to a specific response format
+ * defined below.
+ *
+ * The executable must print out the 3rd party token to STDOUT in JSON format. When an
+ * output_file is specified in the credential configuration, the executable must also handle writing the
+ * JSON response to this file.
+ *
+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ *
+ * The "expiration_time" field in the JSON response is only required for successful
+ * responses when an output file was specified in the credential configuration
+ *
+ * The auth libraries will populate certain environment variables that will be accessible by the
+ * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
+ * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
+ * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
+ *
Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + // Check if the provided timeout exists and if it is valid. + if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; + } + else { + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); + } + } + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, + }); + this.credentialSourceType = 'executable'; + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); + } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); + } + // If no response from output file, call the executable. 
+ if (!executableResponse) { + // Set up environment map with required values for the executable. + const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); + } + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = + await this.handler.retrieveResponseFromExecutable(envMap); + } + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); + } + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); + } + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } + } + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); + } + // Return subject token from response. + return executableResponse.subjectToken; + } +} +exports.PluggableAuthClient = PluggableAuthClient; /***/ }), -/***/ 43084: -/***/ ((__unused_webpack_module, exports) => { +/***/ 18941: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = __nccwpck_require__(44782); +const executable_response_1 = __nccwpck_require__(8749); +const childProcess = __nccwpck_require__(32081); +const fs = __nccwpck_require__(57147); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { + /** + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. 
+ */ + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); + } + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); + } + this.outputFile = options.outputFile; + } + /** + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. + */ + retrieveResponseFromExecutable(envMap) { + return new Promise((resolve, reject) => { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. + child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. + try { + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); + } + } + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); + } + }); + }); + } + /** + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. + */ + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; + } + let filePath; + try { + filePath = await fs.promises.realpath(this.outputFile); + } + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; + } + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; + } + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', + }); + if (responseString === '') { + return undefined; + } + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. 
+ if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); + } + return undefined; + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; + } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + } + } + /** + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. + */ + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); + } + // Remove quotation marks from the beginning and end of each component if they are present. + for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); + } + } + return components; + } +} +exports.PluggableAuthHandler = PluggableAuthHandler; /***/ }), -/***/ 89843: -/***/ ((__unused_webpack_module, exports) => { +/***/ 98790: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +const querystring_1 = __nccwpck_require__(63477); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super(opts); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; + } + /** + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. 
+ */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); + } + async fetchIdToken(targetAudience) { + const res = await this.transporter.request({ + ...UserRefreshClient.RETRY_CONFIG, + url: this.endpoints.oauth2TokenUrl, + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + method: 'POST', + data: (0, querystring_1.stringify)({ + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + refresh_token: this._refreshToken, + target_audience: targetAudience, + }), + }); + return res.data.id_token; + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); + } + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); + } + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); + } + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + static fromJSON(json) { + const client = new UserRefreshClient(); + client.fromJSON(json); + return client; + } +} +exports.UserRefreshClient = UserRefreshClient; /***/ }), -/***/ 63799: -/***/ ((__unused_webpack_module, exports) => { +/***/ 86308: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StsCredentials = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const querystring = __nccwpck_require__(63477); +const transporters_1 = __nccwpck_require__(72649); +const oauth2common_1 = __nccwpck_require__(19510); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); + } + /** + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. + */ + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; + } + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. + Object.assign(headers, additionalHeaders || {}); + const opts = { + ...StsCredentials.RETRY_CONFIG, + url: this.tokenExchangeEndpoint.toString(), + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. 
+ if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +exports.StsCredentials = StsCredentials; /***/ }), -/***/ 21550: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7428: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(55612), exports); -tslib_1.__exportStar(__nccwpck_require__(43084), exports); -tslib_1.__exportStar(__nccwpck_require__(89843), exports); -tslib_1.__exportStar(__nccwpck_require__(57658), exports); -tslib_1.__exportStar(__nccwpck_require__(63799), exports); +exports.UrlSubjectTokenSupplier = void 0; +/** + * Internal subject token supplier implementation used when a URL + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class UrlSubjectTokenSupplier { + /** + * Instantiates a URL subject token supplier. + * @param opts The URL subject token supplier options to build the supplier with. + */ + constructor(opts) { + this.url = opts.url; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + this.headers = opts.headers; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Sends a GET request to the URL provided in the constructor and resolves + * with the returned external subject token. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.url, + method: 'GET', + headers: this.headers, + responseType: this.formatType, + }; + let subjectToken; + if (this.formatType === 'text') { + const response = await context.transporter.request(opts); + subjectToken = response.data; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const response = await context.transporter.request(opts); + subjectToken = response.data[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; + } +} +exports.UrlSubjectTokenSupplier = UrlSubjectTokenSupplier; /***/ }), -/***/ 57658: -/***/ ((__unused_webpack_module, exports) => { +/***/ 14693: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = __nccwpck_require__(26463); +const crypto_1 = __nccwpck_require__(78043); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); + } + } + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); + } + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; + } + return base64; + } + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; + } + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); + } + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + const result = new TextDecoder().decode(uint8array); + return result; + } + encodeBase64StringUtf8(text) { + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. 
+ // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? key + : String.fromCharCode(...new Uint16Array(key)); + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + } +} +exports.BrowserCrypto = BrowserCrypto; /***/ }), -/***/ 88508: -/***/ ((__unused_webpack_module, exports) => { +/***/ 78043: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createCrypto = createCrypto; +exports.hasBrowserCrypto = hasBrowserCrypto; +exports.fromArrayBufferToHex = fromArrayBufferToHex; +const crypto_1 = __nccwpck_require__(14693); +const crypto_2 = __nccwpck_require__(30757); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); + } + return new crypto_2.NodeCrypto(); +} +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. + return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} /***/ }), -/***/ 8947: -/***/ ((__unused_webpack_module, exports) => { +/***/ 30757: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveChecksumRuntimeConfig = exports.getChecksumConfiguration = exports.AlgorithmId = void 0; -var AlgorithmId; -(function (AlgorithmId) { - AlgorithmId["MD5"] = "md5"; - AlgorithmId["CRC32"] = "crc32"; - AlgorithmId["CRC32C"] = "crc32c"; - AlgorithmId["SHA1"] = "sha1"; - AlgorithmId["SHA256"] = "sha256"; -})(AlgorithmId = exports.AlgorithmId || (exports.AlgorithmId = {})); -const getChecksumConfiguration = (runtimeConfig) => { - const checksumAlgorithms = []; - if (runtimeConfig.sha256 !== undefined) { - checksumAlgorithms.push({ - algorithmId: () => AlgorithmId.SHA256, - checksumConstructor: () => runtimeConfig.sha256, - }); +exports.NodeCrypto = void 0; +const crypto = __nccwpck_require__(6113); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); } - if (runtimeConfig.md5 != undefined) { - checksumAlgorithms.push({ - algorithmId: () => AlgorithmId.MD5, - checksumConstructor: () => runtimeConfig.md5, - }); + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); } - return { - _checksumAlgorithms: checksumAlgorithms, - addChecksumAlgorithm(algo) { - this._checksumAlgorithms.push(algo); - }, - checksumAlgorithms() { - return this._checksumAlgorithms; - }, - }; -}; -exports.getChecksumConfiguration = getChecksumConfiguration; -const resolveChecksumRuntimeConfig = (clientConfig) => { - const runtimeConfig = {}; - clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { - runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); - }); - return runtimeConfig; -}; -exports.resolveChecksumRuntimeConfig = resolveChecksumRuntimeConfig; + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('RSA-SHA256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); + } + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); + } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); + } + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? 
key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); + } +} +exports.NodeCrypto = NodeCrypto; +/** + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. + */ +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. + */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} /***/ }), -/***/ 89169: +/***/ 20810: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveDefaultRuntimeConfig = exports.getDefaultClientConfiguration = void 0; -const checksum_1 = __nccwpck_require__(8947); -const getDefaultClientConfiguration = (runtimeConfig) => { - return { - ...(0, checksum_1.getChecksumConfiguration)(runtimeConfig), - }; -}; -exports.getDefaultClientConfiguration = getDefaultClientConfiguration; -const resolveDefaultRuntimeConfig = (config) => { - return { - ...(0, checksum_1.resolveChecksumRuntimeConfig)(config), - }; -}; -exports.resolveDefaultRuntimeConfig = resolveDefaultRuntimeConfig; - - -/***/ }), - -/***/ 32245: +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PassThroughClient = exports.ExecutableError = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsRequestSigner = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.ClientAuthentication = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gaxios = exports.gcpMetadata = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = __nccwpck_require__(20695); +Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); +// Export common deps to ensure types/instances are the exact match. Useful +// for consistently configuring the library across versions. 
+exports.gcpMetadata = __nccwpck_require__(3563); +exports.gaxios = __nccwpck_require__(59555); +var authclient_1 = __nccwpck_require__(44627); +Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); +var computeclient_1 = __nccwpck_require__(96875); +Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); +var envDetect_1 = __nccwpck_require__(21380); +Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); +var iam_1 = __nccwpck_require__(39735); +Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); +var idtokenclient_1 = __nccwpck_require__(80298); +Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); +var jwtaccess_1 = __nccwpck_require__(68740); +Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); +var jwtclient_1 = __nccwpck_require__(13959); +Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); +var impersonated_1 = __nccwpck_require__(91103); +Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); +var oauth2client_1 = __nccwpck_require__(3936); +Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); +Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); +Object.defineProperty(exports, "ClientAuthentication", ({ enumerable: true, get: function () { return oauth2client_1.ClientAuthentication; } })); +var loginticket_1 = __nccwpck_require__(74524); +Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); +var refreshclient_1 = __nccwpck_require__(98790); +Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); +var awsclient_1 = __nccwpck_require__(71569); +Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); +var awsrequestsigner_1 = __nccwpck_require__(1754); +Object.defineProperty(exports, "AwsRequestSigner", ({ enumerable: true, get: function () { return awsrequestsigner_1.AwsRequestSigner; } })); +var identitypoolclient_1 = __nccwpck_require__(20117); +Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); +var externalclient_1 = __nccwpck_require__(94381); +Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); +var baseexternalclient_1 = __nccwpck_require__(40810); +Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); +var downscopedclient_1 = __nccwpck_require__(6270); +Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: 
function () { return downscopedclient_1.DownscopedClient; } })); +var pluggable_auth_client_1 = __nccwpck_require__(44782); +Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); +Object.defineProperty(exports, "ExecutableError", ({ enumerable: true, get: function () { return pluggable_auth_client_1.ExecutableError; } })); +var passthrough_1 = __nccwpck_require__(32460); +Object.defineProperty(exports, "PassThroughClient", ({ enumerable: true, get: function () { return passthrough_1.PassThroughClient; } })); +var transporters_1 = __nccwpck_require__(72649); +Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; + + +/***/ }), + +/***/ 16608: /***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validate = validate; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); + } + } +} /***/ }), -/***/ 47447: +/***/ 72649: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AlgorithmId = void 0; -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(89169), exports); -tslib_1.__exportStar(__nccwpck_require__(32245), exports); -var checksum_1 = __nccwpck_require__(8947); -Object.defineProperty(exports, "AlgorithmId", ({ enumerable: true, get: function () { return checksum_1.AlgorithmId; } })); +exports.DefaultTransporter = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const options_1 = __nccwpck_require__(16608); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(51402); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + constructor() { + /** + * A configurable, replacable `Gaxios` instance. + */ + this.instance = new gaxios_1.Gaxios(); + } + /** + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. + */ + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; + } + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = + `${uaValue} ${DefaultTransporter.USER_AGENT}`; + } + // track google-auth-library-nodejs version: + if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; + } + } + return opts; + } + /** + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. + */ + request(opts) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + (0, options_1.validate)(opts); + return this.instance.request(opts).catch(e => { + throw this.processError(e); + }); + } + get defaults() { + return this.instance.defaults; + } + set defaults(opts) { + this.instance.defaults = opts; + } + /** + * Changes the error to include details from the body. + */ + processError(e) { + const res = e.response; + const err = e; + const body = res ? res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.status = res.status; + } + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; + } + else { + err.message = body.error.message; + err.code = body.error.code; + } + } + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.status = res.status; + } + return err; + } +} +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. + */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; /***/ }), -/***/ 18883: -/***/ ((__unused_webpack_module, exports) => { +/***/ 68905: +/***/ (function(__unused_webpack_module, exports) { "use strict"; +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.FieldPosition = void 0; -var FieldPosition; -(function (FieldPosition) { - FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER"; - FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER"; -})(FieldPosition = exports.FieldPosition || (exports.FieldPosition = {})); +exports.LRUCache = void 0; +exports.snakeToCamel = snakeToCamel; +exports.originalOrCamelOptions = originalOrCamelOptions; +/** + * Returns the camel case of a provided string. + * + * @remarks + * + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. + * + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string + */ +function snakeToCamel(str) { + return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); +} +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. + * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +function originalOrCamelOptions(obj) { + /** + * + * @param key an index of object, preferably snake_case + * @returns the value `obj[key || snakeKey]`, if available + */ + function get(key) { + var _a; + const o = (obj || {}); + return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; + } + return { get }; +} +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +class LRUCache { + constructor(options) { + _LRUCache_instances.add(this); + /** + * Maps are in order. Thus, the older item is the first item. + * + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} + */ + _LRUCache_cache.set(this, new Map()); + this.capacity = options.capacity; + this.maxAge = options.maxAge; + } + /** + * Add an item to the cache. + * + * @param key the key to upsert + * @param value the value of the key + */ + set(key, value) { + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + } + /** + * Get an item from the cache. 
+ * + * @param key the key to retrieve + */ + get(key) { + const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); + if (!item) + return; + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + return item.value; + } +} +exports.LRUCache = LRUCache; +_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); + __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { + value, + lastAccessed: Date.now(), + }); +}, _LRUCache_evict = function _LRUCache_evict() { + const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; + /** + * Because we know Maps are in order, this item is both the + * last item in the list (capacity) and oldest (maxAge). + */ + let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + while (!oldestItem.done && + (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many + oldestItem.value[1].lastAccessed < cutoffDate) // too old + ) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); + oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + } +}; /***/ }), -/***/ 12842: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); +/***/ 83555: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/* module decorator */ module = __nccwpck_require__.nmd(module); +(e=>{"function"==typeof define&&define.amd?define(["protobufjs/minimal"],e): true&&module&&module.exports&&(module.exports=e(__nccwpck_require__(96916)))})(function(o){var e,t,n,r,F,a=o.Reader,i=o.Writer,p=o.util,l=o.roots.iam_protos||(o.roots.iam_protos={});function B(e,t,n){o.rpc.Service.call(this,e,t,n)}function s(e){if(e)for(var t=Object.keys(e),n=0;n>>3){case 1:o.resource=e.string();break;case 2:o.policy=l.google.iam.v1.Policy.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},s.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},s.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.resource&&e.hasOwnProperty("resource")&&!p.isString(e.resource))return"resource: string expected";if(null!=e.policy&&e.hasOwnProperty("policy")){e=l.google.iam.v1.Policy.verify(e.policy);if(e)return"policy."+e}return null},s.fromObject=function(e){if(e instanceof l.google.iam.v1.SetIamPolicyRequest)return e;var t=new l.google.iam.v1.SetIamPolicyRequest;if(null!=e.resource&&(t.resource=String(e.resource)),null!=e.policy){if("object"!=typeof e.policy)throw TypeError(".google.iam.v1.SetIamPolicyRequest.policy: object expected");t.policy=l.google.iam.v1.Policy.fromObject(e.policy)}return t},s.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.resource="",n.policy=null),null!=e.resource&&e.hasOwnProperty("resource")&&(n.resource=e.resource),null!=e.policy&&e.hasOwnProperty("policy")&&(n.policy=l.google.iam.v1.Policy.toObject(e.policy,t)),n},s.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},s),t.GetIamPolicyRequest=(u.prototype.resource="",u.prototype.options=null,u.create=function(e){return new u(e)},u.encode=function(e,t){return 
t=t||i.create(),null!=e.resource&&Object.hasOwnProperty.call(e,"resource")&&t.uint32(10).string(e.resource),null!=e.options&&Object.hasOwnProperty.call(e,"options")&&l.google.iam.v1.GetPolicyOptions.encode(e.options,t.uint32(18).fork()).ldelim(),t},u.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},u.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new l.google.iam.v1.GetIamPolicyRequest;e.pos>>3){case 1:o.resource=e.string();break;case 2:o.options=l.google.iam.v1.GetPolicyOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},u.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},u.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.resource&&e.hasOwnProperty("resource")&&!p.isString(e.resource))return"resource: string expected";if(null!=e.options&&e.hasOwnProperty("options")){e=l.google.iam.v1.GetPolicyOptions.verify(e.options);if(e)return"options."+e}return null},u.fromObject=function(e){if(e instanceof l.google.iam.v1.GetIamPolicyRequest)return e;var t=new l.google.iam.v1.GetIamPolicyRequest;if(null!=e.resource&&(t.resource=String(e.resource)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.iam.v1.GetIamPolicyRequest.options: object expected");t.options=l.google.iam.v1.GetPolicyOptions.fromObject(e.options)}return t},u.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.resource="",n.options=null),null!=e.resource&&e.hasOwnProperty("resource")&&(n.resource=e.resource),null!=e.options&&e.hasOwnProperty("options")&&(n.options=l.google.iam.v1.GetPolicyOptions.toObject(e.options,t)),n},u.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},u),t.TestIamPermissionsRequest=(c.prototype.resource="",c.prototype.permissions=p.emptyArray,c.create=function(e){return new c(e)},c.encode=function(e,t){if(t=t||i.create(),null!=e.resource&&Object.hasOwnProperty.call(e,"resource")&&t.uint32(10).string(e.resource),null!=e.permissions&&e.permissions.length)for(var n=0;n>>3){case 1:o.resource=e.string();break;case 2:o.permissions&&o.permissions.length||(o.permissions=[]),o.permissions.push(e.string());break;default:e.skipType(7&r)}}return o},c.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},c.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.resource&&e.hasOwnProperty("resource")&&!p.isString(e.resource))return"resource: string expected";if(null!=e.permissions&&e.hasOwnProperty("permissions")){if(!Array.isArray(e.permissions))return"permissions: array expected";for(var t=0;t>>3==1?(o.permissions&&o.permissions.length||(o.permissions=[]),o.permissions.push(e.string())):e.skipType(7&r)}return o},G.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},G.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.permissions&&e.hasOwnProperty("permissions")){if(!Array.isArray(e.permissions))return"permissions: array expected";for(var t=0;t>>3==1?o.requestedPolicyVersion=e.int32():e.skipType(7&r)}return o},U.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},U.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.requestedPolicyVersion&&e.hasOwnProperty("requestedPolicyVersion")&&!p.isInteger(e.requestedPolicyVersion)?"requestedPolicyVersion: integer expected":null},U.fromObject=function(e){var 
t;return e instanceof l.google.iam.v1.GetPolicyOptions?e:(t=new l.google.iam.v1.GetPolicyOptions,null!=e.requestedPolicyVersion&&(t.requestedPolicyVersion=0|e.requestedPolicyVersion),t)},U.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.requestedPolicyVersion=0),null!=e.requestedPolicyVersion&&e.hasOwnProperty("requestedPolicyVersion")&&(n.requestedPolicyVersion=e.requestedPolicyVersion),n},U.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},U),t.Policy=(d.prototype.version=0,d.prototype.bindings=p.emptyArray,d.prototype.etag=p.newBuffer([]),d.create=function(e){return new d(e)},d.encode=function(e,t){if(t=t||i.create(),null!=e.version&&Object.hasOwnProperty.call(e,"version")&&t.uint32(8).int32(e.version),null!=e.etag&&Object.hasOwnProperty.call(e,"etag")&&t.uint32(26).bytes(e.etag),null!=e.bindings&&e.bindings.length)for(var n=0;n>>3){case 1:o.version=e.int32();break;case 4:o.bindings&&o.bindings.length||(o.bindings=[]),o.bindings.push(l.google.iam.v1.Binding.decode(e,e.uint32()));break;case 3:o.etag=e.bytes();break;default:e.skipType(7&r)}}return o},d.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},d.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.version&&e.hasOwnProperty("version")&&!p.isInteger(e.version))return"version: integer expected";if(null!=e.bindings&&e.hasOwnProperty("bindings")){if(!Array.isArray(e.bindings))return"bindings: array expected";for(var t=0;t>>3){case 1:o.role=e.string();break;case 2:o.members&&o.members.length||(o.members=[]),o.members.push(e.string());break;case 3:o.condition=l.google.type.Expr.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},g.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},g.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.role&&e.hasOwnProperty("role")&&!p.isString(e.role))return"role: string expected";if(null!=e.members&&e.hasOwnProperty("members")){if(!Array.isArray(e.members))return"members: array expected";for(var t=0;t>>3){case 1:o.bindingDeltas&&o.bindingDeltas.length||(o.bindingDeltas=[]),o.bindingDeltas.push(l.google.iam.v1.BindingDelta.decode(e,e.uint32()));break;case 2:o.auditConfigDeltas&&o.auditConfigDeltas.length||(o.auditConfigDeltas=[]),o.auditConfigDeltas.push(l.google.iam.v1.AuditConfigDelta.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},M.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},M.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.bindingDeltas&&e.hasOwnProperty("bindingDeltas")){if(!Array.isArray(e.bindingDeltas))return"bindingDeltas: array expected";for(var t=0;t>>3){case 1:o.action=e.int32();break;case 2:o.role=e.string();break;case 3:o.member=e.string();break;case 4:o.condition=l.google.type.Expr.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},f.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},f.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.action&&e.hasOwnProperty("action"))switch(e.action){default:return"action: enum value expected";case 0:case 1:case 2:}if(null!=e.role&&e.hasOwnProperty("role")&&!p.isString(e.role))return"role: string expected";if(null!=e.member&&e.hasOwnProperty("member")&&!p.isString(e.member))return"member: string 
expected";if(null!=e.condition&&e.hasOwnProperty("condition")){e=l.google.type.Expr.verify(e.condition);if(e)return"condition."+e}return null},f.fromObject=function(e){if(e instanceof l.google.iam.v1.BindingDelta)return e;var t=new l.google.iam.v1.BindingDelta;switch(e.action){case"ACTION_UNSPECIFIED":case 0:t.action=0;break;case"ADD":case 1:t.action=1;break;case"REMOVE":case 2:t.action=2}if(null!=e.role&&(t.role=String(e.role)),null!=e.member&&(t.member=String(e.member)),null!=e.condition){if("object"!=typeof e.condition)throw TypeError(".google.iam.v1.BindingDelta.condition: object expected");t.condition=l.google.type.Expr.fromObject(e.condition)}return t},f.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.action=t.enums===String?"ACTION_UNSPECIFIED":0,n.role="",n.member="",n.condition=null),null!=e.action&&e.hasOwnProperty("action")&&(n.action=t.enums===String?l.google.iam.v1.BindingDelta.Action[e.action]:e.action),null!=e.role&&e.hasOwnProperty("role")&&(n.role=e.role),null!=e.member&&e.hasOwnProperty("member")&&(n.member=e.member),null!=e.condition&&e.hasOwnProperty("condition")&&(n.condition=l.google.type.Expr.toObject(e.condition,t)),n},f.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},f.Action=(e={},(r=Object.create(e))[e[0]="ACTION_UNSPECIFIED"]=0,r[e[1]="ADD"]=1,r[e[2]="REMOVE"]=2,r),f),t.AuditConfigDelta=(y.prototype.action=0,y.prototype.service="",y.prototype.exemptedMember="",y.prototype.logType="",y.create=function(e){return new y(e)},y.encode=function(e,t){return t=t||i.create(),null!=e.action&&Object.hasOwnProperty.call(e,"action")&&t.uint32(8).int32(e.action),null!=e.service&&Object.hasOwnProperty.call(e,"service")&&t.uint32(18).string(e.service),null!=e.exemptedMember&&Object.hasOwnProperty.call(e,"exemptedMember")&&t.uint32(26).string(e.exemptedMember),null!=e.logType&&Object.hasOwnProperty.call(e,"logType")&&t.uint32(34).string(e.logType),t},y.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},y.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new l.google.iam.v1.AuditConfigDelta;e.pos>>3){case 1:o.action=e.int32();break;case 2:o.service=e.string();break;case 3:o.exemptedMember=e.string();break;case 4:o.logType=e.string();break;default:e.skipType(7&r)}}return o},y.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},y.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.action&&e.hasOwnProperty("action"))switch(e.action){default:return"action: enum value expected";case 0:case 1:case 2:}return null!=e.service&&e.hasOwnProperty("service")&&!p.isString(e.service)?"service: string expected":null!=e.exemptedMember&&e.hasOwnProperty("exemptedMember")&&!p.isString(e.exemptedMember)?"exemptedMember: string expected":null!=e.logType&&e.hasOwnProperty("logType")&&!p.isString(e.logType)?"logType: string expected":null},y.fromObject=function(e){if(e instanceof l.google.iam.v1.AuditConfigDelta)return e;var t=new l.google.iam.v1.AuditConfigDelta;switch(e.action){case"ACTION_UNSPECIFIED":case 0:t.action=0;break;case"ADD":case 1:t.action=1;break;case"REMOVE":case 2:t.action=2}return null!=e.service&&(t.service=String(e.service)),null!=e.exemptedMember&&(t.exemptedMember=String(e.exemptedMember)),null!=e.logType&&(t.logType=String(e.logType)),t},y.toObject=function(e,t){var 
n={};return(t=t||{}).defaults&&(n.action=t.enums===String?"ACTION_UNSPECIFIED":0,n.service="",n.exemptedMember="",n.logType=""),null!=e.action&&e.hasOwnProperty("action")&&(n.action=t.enums===String?l.google.iam.v1.AuditConfigDelta.Action[e.action]:e.action),null!=e.service&&e.hasOwnProperty("service")&&(n.service=e.service),null!=e.exemptedMember&&e.hasOwnProperty("exemptedMember")&&(n.exemptedMember=e.exemptedMember),null!=e.logType&&e.hasOwnProperty("logType")&&(n.logType=e.logType),n},y.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},y.Action=(e={},(r=Object.create(e))[e[0]="ACTION_UNSPECIFIED"]=0,r[e[1]="ADD"]=1,r[e[2]="REMOVE"]=2,r),y),t.logging=((e={}).AuditData=(L.prototype.policyDelta=null,L.create=function(e){return new L(e)},L.encode=function(e,t){return t=t||i.create(),null!=e.policyDelta&&Object.hasOwnProperty.call(e,"policyDelta")&&l.google.iam.v1.PolicyDelta.encode(e.policyDelta,t.uint32(18).fork()).ldelim(),t},L.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},L.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new l.google.iam.v1.logging.AuditData;e.pos>>3==2?o.policyDelta=l.google.iam.v1.PolicyDelta.decode(e,e.uint32()):e.skipType(7&r)}return o},L.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},L.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.policyDelta&&e.hasOwnProperty("policyDelta")){e=l.google.iam.v1.PolicyDelta.verify(e.policyDelta);if(e)return"policyDelta."+e}return null},L.fromObject=function(e){if(e instanceof l.google.iam.v1.logging.AuditData)return e;var t=new l.google.iam.v1.logging.AuditData;if(null!=e.policyDelta){if("object"!=typeof e.policyDelta)throw TypeError(".google.iam.v1.logging.AuditData.policyDelta: object expected");t.policyDelta=l.google.iam.v1.PolicyDelta.fromObject(e.policyDelta)}return t},L.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.policyDelta=null),null!=e.policyDelta&&e.hasOwnProperty("policyDelta")&&(n.policyDelta=l.google.iam.v1.PolicyDelta.toObject(e.policyDelta,t)),n},L.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},L),e),t),n),F.api=((r={}).Http=(J.prototype.rules=p.emptyArray,J.prototype.fullyDecodeReservedExpansion=!1,J.create=function(e){return new J(e)},J.encode=function(e,t){if(t=t||i.create(),null!=e.rules&&e.rules.length)for(var n=0;n>>3){case 1:o.rules&&o.rules.length||(o.rules=[]),o.rules.push(l.google.api.HttpRule.decode(e,e.uint32()));break;case 2:o.fullyDecodeReservedExpansion=e.bool();break;default:e.skipType(7&r)}}return o},J.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},J.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.rules&&e.hasOwnProperty("rules")){if(!Array.isArray(e.rules))return"rules: array expected";for(var t=0;t>>3){case 1:o.selector=e.string();break;case 2:o.get=e.string();break;case 3:o.put=e.string();break;case 4:o.post=e.string();break;case 5:o.delete=e.string();break;case 6:o.patch=e.string();break;case 8:o.custom=l.google.api.CustomHttpPattern.decode(e,e.uint32());break;case 7:o.body=e.string();break;case 12:o.responseBody=e.string();break;case 11:o.additionalBindings&&o.additionalBindings.length||(o.additionalBindings=[]),o.additionalBindings.push(l.google.api.HttpRule.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},h.decodeDelimited=function(e){return e instanceof a||(e=new 
a(e)),this.decode(e,e.uint32())},h.verify=function(e){if("object"!=typeof e||null===e)return"object expected";var t={};if(null!=e.selector&&e.hasOwnProperty("selector")&&!p.isString(e.selector))return"selector: string expected";if(null!=e.get&&e.hasOwnProperty("get")&&(t.pattern=1,!p.isString(e.get)))return"get: string expected";if(null!=e.put&&e.hasOwnProperty("put")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!p.isString(e.put))return"put: string expected"}if(null!=e.post&&e.hasOwnProperty("post")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!p.isString(e.post))return"post: string expected"}if(null!=e.delete&&e.hasOwnProperty("delete")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!p.isString(e.delete))return"delete: string expected"}if(null!=e.patch&&e.hasOwnProperty("patch")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!p.isString(e.patch))return"patch: string expected"}if(null!=e.custom&&e.hasOwnProperty("custom")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,n=l.google.api.CustomHttpPattern.verify(e.custom))return"custom."+n}if(null!=e.body&&e.hasOwnProperty("body")&&!p.isString(e.body))return"body: string expected";if(null!=e.responseBody&&e.hasOwnProperty("responseBody")&&!p.isString(e.responseBody))return"responseBody: string expected";if(null!=e.additionalBindings&&e.hasOwnProperty("additionalBindings")){if(!Array.isArray(e.additionalBindings))return"additionalBindings: array expected";for(var n,o=0;o>>3){case 1:o.kind=e.string();break;case 2:o.path=e.string();break;default:e.skipType(7&r)}}return o},_.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},_.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.kind&&e.hasOwnProperty("kind")&&!p.isString(e.kind)?"kind: string expected":null!=e.path&&e.hasOwnProperty("path")&&!p.isString(e.path)?"path: string expected":null},_.fromObject=function(e){var t;return e instanceof l.google.api.CustomHttpPattern?e:(t=new l.google.api.CustomHttpPattern,null!=e.kind&&(t.kind=String(e.kind)),null!=e.path&&(t.path=String(e.path)),t)},_.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.kind="",n.path=""),null!=e.kind&&e.hasOwnProperty("kind")&&(n.kind=e.kind),null!=e.path&&e.hasOwnProperty("path")&&(n.path=e.path),n},_.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},_),r.FieldBehavior=(e={},(t=Object.create(e))[e[0]="FIELD_BEHAVIOR_UNSPECIFIED"]=0,t[e[1]="OPTIONAL"]=1,t[e[2]="REQUIRED"]=2,t[e[3]="OUTPUT_ONLY"]=3,t[e[4]="INPUT_ONLY"]=4,t[e[5]="IMMUTABLE"]=5,t),r.ResourceDescriptor=(b.prototype.type="",b.prototype.pattern=p.emptyArray,b.prototype.nameField="",b.prototype.history=0,b.prototype.plural="",b.prototype.singular="",b.create=function(e){return new b(e)},b.encode=function(e,t){if(t=t||i.create(),null!=e.type&&Object.hasOwnProperty.call(e,"type")&&t.uint32(10).string(e.type),null!=e.pattern&&e.pattern.length)for(var n=0;n>>3){case 1:o.type=e.string();break;case 2:o.pattern&&o.pattern.length||(o.pattern=[]),o.pattern.push(e.string());break;case 3:o.nameField=e.string();break;case 4:o.history=e.int32();break;case 5:o.plural=e.string();break;case 6:o.singular=e.string();break;default:e.skipType(7&r)}}return o},b.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},b.verify=function(e){if("object"!=typeof e||null===e)return"object 
expected";if(null!=e.type&&e.hasOwnProperty("type")&&!p.isString(e.type))return"type: string expected";if(null!=e.pattern&&e.hasOwnProperty("pattern")){if(!Array.isArray(e.pattern))return"pattern: array expected";for(var t=0;t>>3){case 1:o.type=e.string();break;case 2:o.childType=e.string();break;default:e.skipType(7&r)}}return o},H.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},H.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.type&&e.hasOwnProperty("type")&&!p.isString(e.type)?"type: string expected":null!=e.childType&&e.hasOwnProperty("childType")&&!p.isString(e.childType)?"childType: string expected":null},H.fromObject=function(e){var t;return e instanceof l.google.api.ResourceReference?e:(t=new l.google.api.ResourceReference,null!=e.type&&(t.type=String(e.type)),null!=e.childType&&(t.childType=String(e.childType)),t)},H.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.type="",n.childType=""),null!=e.type&&e.hasOwnProperty("type")&&(n.type=e.type),null!=e.childType&&e.hasOwnProperty("childType")&&(n.childType=e.childType),n},H.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},H),r),F.protobuf=((n={}).FileDescriptorSet=(q.prototype.file=p.emptyArray,q.create=function(e){return new q(e)},q.encode=function(e,t){if(t=t||i.create(),null!=e.file&&e.file.length)for(var n=0;n>>3==1?(o.file&&o.file.length||(o.file=[]),o.file.push(l.google.protobuf.FileDescriptorProto.decode(e,e.uint32()))):e.skipType(7&r)}return o},q.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},q.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.file&&e.hasOwnProperty("file")){if(!Array.isArray(e.file))return"file: array expected";for(var t=0;t>>3){case 1:o.name=e.string();break;case 2:o.package=e.string();break;case 3:o.dependency&&o.dependency.length||(o.dependency=[]),o.dependency.push(e.string());break;case 10:if(o.publicDependency&&o.publicDependency.length||(o.publicDependency=[]),2==(7&r))for(var i=e.uint32()+e.pos;e.pos>>3){case 1:o.name=e.string();break;case 2:o.field&&o.field.length||(o.field=[]),o.field.push(l.google.protobuf.FieldDescriptorProto.decode(e,e.uint32()));break;case 6:o.extension&&o.extension.length||(o.extension=[]),o.extension.push(l.google.protobuf.FieldDescriptorProto.decode(e,e.uint32()));break;case 3:o.nestedType&&o.nestedType.length||(o.nestedType=[]),o.nestedType.push(l.google.protobuf.DescriptorProto.decode(e,e.uint32()));break;case 4:o.enumType&&o.enumType.length||(o.enumType=[]),o.enumType.push(l.google.protobuf.EnumDescriptorProto.decode(e,e.uint32()));break;case 5:o.extensionRange&&o.extensionRange.length||(o.extensionRange=[]),o.extensionRange.push(l.google.protobuf.DescriptorProto.ExtensionRange.decode(e,e.uint32()));break;case 8:o.oneofDecl&&o.oneofDecl.length||(o.oneofDecl=[]),o.oneofDecl.push(l.google.protobuf.OneofDescriptorProto.decode(e,e.uint32()));break;case 7:o.options=l.google.protobuf.MessageOptions.decode(e,e.uint32());break;case 9:o.reservedRange&&o.reservedRange.length||(o.reservedRange=[]),o.reservedRange.push(l.google.protobuf.DescriptorProto.ReservedRange.decode(e,e.uint32()));break;case 10:o.reservedName&&o.reservedName.length||(o.reservedName=[]),o.reservedName.push(e.string());break;default:e.skipType(7&r)}}return o},O.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},O.verify=function(e){if("object"!=typeof 
e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!p.isString(e.name))return"name: string expected";if(null!=e.field&&e.hasOwnProperty("field")){if(!Array.isArray(e.field))return"field: array expected";for(var t=0;t>>3){case 1:o.start=e.int32();break;case 2:o.end=e.int32();break;case 3:o.options=l.google.protobuf.ExtensionRangeOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},v.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},v.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.start&&e.hasOwnProperty("start")&&!p.isInteger(e.start))return"start: integer expected";if(null!=e.end&&e.hasOwnProperty("end")&&!p.isInteger(e.end))return"end: integer expected";if(null!=e.options&&e.hasOwnProperty("options")){e=l.google.protobuf.ExtensionRangeOptions.verify(e.options);if(e)return"options."+e}return null},v.fromObject=function(e){if(e instanceof l.google.protobuf.DescriptorProto.ExtensionRange)return e;var t=new l.google.protobuf.DescriptorProto.ExtensionRange;if(null!=e.start&&(t.start=0|e.start),null!=e.end&&(t.end=0|e.end),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.DescriptorProto.ExtensionRange.options: object expected");t.options=l.google.protobuf.ExtensionRangeOptions.fromObject(e.options)}return t},v.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.start=0,n.end=0,n.options=null),null!=e.start&&e.hasOwnProperty("start")&&(n.start=e.start),null!=e.end&&e.hasOwnProperty("end")&&(n.end=e.end),null!=e.options&&e.hasOwnProperty("options")&&(n.options=l.google.protobuf.ExtensionRangeOptions.toObject(e.options,t)),n},v.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},v),O.ReservedRange=(Y.prototype.start=0,Y.prototype.end=0,Y.create=function(e){return new Y(e)},Y.encode=function(e,t){return t=t||i.create(),null!=e.start&&Object.hasOwnProperty.call(e,"start")&&t.uint32(8).int32(e.start),null!=e.end&&Object.hasOwnProperty.call(e,"end")&&t.uint32(16).int32(e.end),t},Y.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},Y.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new l.google.protobuf.DescriptorProto.ReservedRange;e.pos>>3){case 1:o.start=e.int32();break;case 2:o.end=e.int32();break;default:e.skipType(7&r)}}return o},Y.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},Y.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.start&&e.hasOwnProperty("start")&&!p.isInteger(e.start)?"start: integer expected":null!=e.end&&e.hasOwnProperty("end")&&!p.isInteger(e.end)?"end: integer expected":null},Y.fromObject=function(e){var t;return e instanceof l.google.protobuf.DescriptorProto.ReservedRange?e:(t=new l.google.protobuf.DescriptorProto.ReservedRange,null!=e.start&&(t.start=0|e.start),null!=e.end&&(t.end=0|e.end),t)},Y.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.start=0,n.end=0),null!=e.start&&e.hasOwnProperty("start")&&(n.start=e.start),null!=e.end&&e.hasOwnProperty("end")&&(n.end=e.end),n},Y.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},Y),O),n.ExtensionRangeOptions=(z.prototype.uninterpretedOption=p.emptyArray,z.create=function(e){return new z(e)},z.encode=function(e,t){if(t=t||i.create(),null!=e.uninterpretedOption&&e.uninterpretedOption.length)for(var 
n=0;n>>3==999?(o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(l.google.protobuf.UninterpretedOption.decode(e,e.uint32()))):e.skipType(7&r)}return o},z.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},z.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.name=e.string();break;case 3:o.number=e.int32();break;case 4:o.label=e.int32();break;case 5:o.type=e.int32();break;case 6:o.typeName=e.string();break;case 2:o.extendee=e.string();break;case 7:o.defaultValue=e.string();break;case 9:o.oneofIndex=e.int32();break;case 10:o.jsonName=e.string();break;case 8:o.options=l.google.protobuf.FieldOptions.decode(e,e.uint32());break;case 17:o.proto3Optional=e.bool();break;default:e.skipType(7&r)}}return o},P.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},P.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!p.isString(e.name))return"name: string expected";if(null!=e.number&&e.hasOwnProperty("number")&&!p.isInteger(e.number))return"number: integer expected";if(null!=e.label&&e.hasOwnProperty("label"))switch(e.label){default:return"label: enum value expected";case 1:case 2:case 3:}if(null!=e.type&&e.hasOwnProperty("type"))switch(e.type){default:return"type: enum value expected";case 1:case 2:case 3:case 4:case 5:case 6:case 7:case 8:case 9:case 10:case 11:case 12:case 13:case 14:case 15:case 16:case 17:case 18:}if(null!=e.typeName&&e.hasOwnProperty("typeName")&&!p.isString(e.typeName))return"typeName: string expected";if(null!=e.extendee&&e.hasOwnProperty("extendee")&&!p.isString(e.extendee))return"extendee: string expected";if(null!=e.defaultValue&&e.hasOwnProperty("defaultValue")&&!p.isString(e.defaultValue))return"defaultValue: string expected";if(null!=e.oneofIndex&&e.hasOwnProperty("oneofIndex")&&!p.isInteger(e.oneofIndex))return"oneofIndex: integer expected";if(null!=e.jsonName&&e.hasOwnProperty("jsonName")&&!p.isString(e.jsonName))return"jsonName: string expected";if(null!=e.options&&e.hasOwnProperty("options")){var t=l.google.protobuf.FieldOptions.verify(e.options);if(t)return"options."+t}return null!=e.proto3Optional&&e.hasOwnProperty("proto3Optional")&&"boolean"!=typeof e.proto3Optional?"proto3Optional: boolean expected":null},P.fromObject=function(e){if(e instanceof l.google.protobuf.FieldDescriptorProto)return e;var t=new l.google.protobuf.FieldDescriptorProto;switch(null!=e.name&&(t.name=String(e.name)),null!=e.number&&(t.number=0|e.number),e.label){case"LABEL_OPTIONAL":case 1:t.label=1;break;case"LABEL_REQUIRED":case 2:t.label=2;break;case"LABEL_REPEATED":case 3:t.label=3}switch(e.type){case"TYPE_DOUBLE":case 1:t.type=1;break;case"TYPE_FLOAT":case 2:t.type=2;break;case"TYPE_INT64":case 3:t.type=3;break;case"TYPE_UINT64":case 4:t.type=4;break;case"TYPE_INT32":case 5:t.type=5;break;case"TYPE_FIXED64":case 6:t.type=6;break;case"TYPE_FIXED32":case 7:t.type=7;break;case"TYPE_BOOL":case 8:t.type=8;break;case"TYPE_STRING":case 9:t.type=9;break;case"TYPE_GROUP":case 10:t.type=10;break;case"TYPE_MESSAGE":case 11:t.type=11;break;case"TYPE_BYTES":case 12:t.type=12;break;case"TYPE_UINT32":case 13:t.type=13;break;case"TYPE_ENUM":case 14:t.type=14;break;case"TYPE_SFIXED32":case 
15:t.type=15;break;case"TYPE_SFIXED64":case 16:t.type=16;break;case"TYPE_SINT32":case 17:t.type=17;break;case"TYPE_SINT64":case 18:t.type=18}if(null!=e.typeName&&(t.typeName=String(e.typeName)),null!=e.extendee&&(t.extendee=String(e.extendee)),null!=e.defaultValue&&(t.defaultValue=String(e.defaultValue)),null!=e.oneofIndex&&(t.oneofIndex=0|e.oneofIndex),null!=e.jsonName&&(t.jsonName=String(e.jsonName)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.FieldDescriptorProto.options: object expected");t.options=l.google.protobuf.FieldOptions.fromObject(e.options)}return null!=e.proto3Optional&&(t.proto3Optional=Boolean(e.proto3Optional)),t},P.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.extendee="",n.number=0,n.label=t.enums===String?"LABEL_OPTIONAL":1,n.type=t.enums===String?"TYPE_DOUBLE":1,n.typeName="",n.defaultValue="",n.options=null,n.oneofIndex=0,n.jsonName="",n.proto3Optional=!1),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.extendee&&e.hasOwnProperty("extendee")&&(n.extendee=e.extendee),null!=e.number&&e.hasOwnProperty("number")&&(n.number=e.number),null!=e.label&&e.hasOwnProperty("label")&&(n.label=t.enums===String?l.google.protobuf.FieldDescriptorProto.Label[e.label]:e.label),null!=e.type&&e.hasOwnProperty("type")&&(n.type=t.enums===String?l.google.protobuf.FieldDescriptorProto.Type[e.type]:e.type),null!=e.typeName&&e.hasOwnProperty("typeName")&&(n.typeName=e.typeName),null!=e.defaultValue&&e.hasOwnProperty("defaultValue")&&(n.defaultValue=e.defaultValue),null!=e.options&&e.hasOwnProperty("options")&&(n.options=l.google.protobuf.FieldOptions.toObject(e.options,t)),null!=e.oneofIndex&&e.hasOwnProperty("oneofIndex")&&(n.oneofIndex=e.oneofIndex),null!=e.jsonName&&e.hasOwnProperty("jsonName")&&(n.jsonName=e.jsonName),null!=e.proto3Optional&&e.hasOwnProperty("proto3Optional")&&(n.proto3Optional=e.proto3Optional),n},P.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},P.Type=(e={},(t=Object.create(e))[e[1]="TYPE_DOUBLE"]=1,t[e[2]="TYPE_FLOAT"]=2,t[e[3]="TYPE_INT64"]=3,t[e[4]="TYPE_UINT64"]=4,t[e[5]="TYPE_INT32"]=5,t[e[6]="TYPE_FIXED64"]=6,t[e[7]="TYPE_FIXED32"]=7,t[e[8]="TYPE_BOOL"]=8,t[e[9]="TYPE_STRING"]=9,t[e[10]="TYPE_GROUP"]=10,t[e[11]="TYPE_MESSAGE"]=11,t[e[12]="TYPE_BYTES"]=12,t[e[13]="TYPE_UINT32"]=13,t[e[14]="TYPE_ENUM"]=14,t[e[15]="TYPE_SFIXED32"]=15,t[e[16]="TYPE_SFIXED64"]=16,t[e[17]="TYPE_SINT32"]=17,t[e[18]="TYPE_SINT64"]=18,t),P.Label=(e={},(t=Object.create(e))[e[1]="LABEL_OPTIONAL"]=1,t[e[2]="LABEL_REQUIRED"]=2,t[e[3]="LABEL_REPEATED"]=3,t),P),n.OneofDescriptorProto=(W.prototype.name="",W.prototype.options=null,W.create=function(e){return new W(e)},W.encode=function(e,t){return t=t||i.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.options&&Object.hasOwnProperty.call(e,"options")&&l.google.protobuf.OneofOptions.encode(e.options,t.uint32(18).fork()).ldelim(),t},W.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},W.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new l.google.protobuf.OneofDescriptorProto;e.pos>>3){case 1:o.name=e.string();break;case 2:o.options=l.google.protobuf.OneofOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},W.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},W.verify=function(e){if("object"!=typeof e||null===e)return"object 
expected";if(null!=e.name&&e.hasOwnProperty("name")&&!p.isString(e.name))return"name: string expected";if(null!=e.options&&e.hasOwnProperty("options")){e=l.google.protobuf.OneofOptions.verify(e.options);if(e)return"options."+e}return null},W.fromObject=function(e){if(e instanceof l.google.protobuf.OneofDescriptorProto)return e;var t=new l.google.protobuf.OneofDescriptorProto;if(null!=e.name&&(t.name=String(e.name)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.OneofDescriptorProto.options: object expected");t.options=l.google.protobuf.OneofOptions.fromObject(e.options)}return t},W.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.options=null),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.options&&e.hasOwnProperty("options")&&(n.options=l.google.protobuf.OneofOptions.toObject(e.options,t)),n},W.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},W),n.EnumDescriptorProto=(w.prototype.name="",w.prototype.value=p.emptyArray,w.prototype.options=null,w.prototype.reservedRange=p.emptyArray,w.prototype.reservedName=p.emptyArray,w.create=function(e){return new w(e)},w.encode=function(e,t){if(t=t||i.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.value&&e.value.length)for(var n=0;n>>3){case 1:o.name=e.string();break;case 2:o.value&&o.value.length||(o.value=[]),o.value.push(l.google.protobuf.EnumValueDescriptorProto.decode(e,e.uint32()));break;case 3:o.options=l.google.protobuf.EnumOptions.decode(e,e.uint32());break;case 4:o.reservedRange&&o.reservedRange.length||(o.reservedRange=[]),o.reservedRange.push(l.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(e,e.uint32()));break;case 5:o.reservedName&&o.reservedName.length||(o.reservedName=[]),o.reservedName.push(e.string());break;default:e.skipType(7&r)}}return o},w.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},w.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!p.isString(e.name))return"name: string expected";if(null!=e.value&&e.hasOwnProperty("value")){if(!Array.isArray(e.value))return"value: array expected";for(var t=0;t>>3){case 1:o.start=e.int32();break;case 2:o.end=e.int32();break;default:e.skipType(7&r)}}return o},X.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},X.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.start&&e.hasOwnProperty("start")&&!p.isInteger(e.start)?"start: integer expected":null!=e.end&&e.hasOwnProperty("end")&&!p.isInteger(e.end)?"end: integer expected":null},X.fromObject=function(e){var t;return e instanceof l.google.protobuf.EnumDescriptorProto.EnumReservedRange?e:(t=new l.google.protobuf.EnumDescriptorProto.EnumReservedRange,null!=e.start&&(t.start=0|e.start),null!=e.end&&(t.end=0|e.end),t)},X.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.start=0,n.end=0),null!=e.start&&e.hasOwnProperty("start")&&(n.start=e.start),null!=e.end&&e.hasOwnProperty("end")&&(n.end=e.end),n},X.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},X),w),n.EnumValueDescriptorProto=(j.prototype.name="",j.prototype.number=0,j.prototype.options=null,j.create=function(e){return new j(e)},j.encode=function(e,t){return 
t=t||i.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.number&&Object.hasOwnProperty.call(e,"number")&&t.uint32(16).int32(e.number),null!=e.options&&Object.hasOwnProperty.call(e,"options")&&l.google.protobuf.EnumValueOptions.encode(e.options,t.uint32(26).fork()).ldelim(),t},j.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},j.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new l.google.protobuf.EnumValueDescriptorProto;e.pos>>3){case 1:o.name=e.string();break;case 2:o.number=e.int32();break;case 3:o.options=l.google.protobuf.EnumValueOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},j.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},j.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!p.isString(e.name))return"name: string expected";if(null!=e.number&&e.hasOwnProperty("number")&&!p.isInteger(e.number))return"number: integer expected";if(null!=e.options&&e.hasOwnProperty("options")){e=l.google.protobuf.EnumValueOptions.verify(e.options);if(e)return"options."+e}return null},j.fromObject=function(e){if(e instanceof l.google.protobuf.EnumValueDescriptorProto)return e;var t=new l.google.protobuf.EnumValueDescriptorProto;if(null!=e.name&&(t.name=String(e.name)),null!=e.number&&(t.number=0|e.number),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.EnumValueDescriptorProto.options: object expected");t.options=l.google.protobuf.EnumValueOptions.fromObject(e.options)}return t},j.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.number=0,n.options=null),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.number&&e.hasOwnProperty("number")&&(n.number=e.number),null!=e.options&&e.hasOwnProperty("options")&&(n.options=l.google.protobuf.EnumValueOptions.toObject(e.options,t)),n},j.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},j),n.ServiceDescriptorProto=(D.prototype.name="",D.prototype.method=p.emptyArray,D.prototype.options=null,D.create=function(e){return new D(e)},D.encode=function(e,t){if(t=t||i.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.method&&e.method.length)for(var n=0;n>>3){case 1:o.name=e.string();break;case 2:o.method&&o.method.length||(o.method=[]),o.method.push(l.google.protobuf.MethodDescriptorProto.decode(e,e.uint32()));break;case 3:o.options=l.google.protobuf.ServiceOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},D.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},D.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!p.isString(e.name))return"name: string expected";if(null!=e.method&&e.hasOwnProperty("method")){if(!Array.isArray(e.method))return"method: array expected";for(var t=0;t>>3){case 1:o.name=e.string();break;case 2:o.inputType=e.string();break;case 3:o.outputType=e.string();break;case 4:o.options=l.google.protobuf.MethodOptions.decode(e,e.uint32());break;case 5:o.clientStreaming=e.bool();break;case 6:o.serverStreaming=e.bool();break;default:e.skipType(7&r)}}return o},x.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},x.verify=function(e){if("object"!=typeof e||null===e)return"object 
expected";if(null!=e.name&&e.hasOwnProperty("name")&&!p.isString(e.name))return"name: string expected";if(null!=e.inputType&&e.hasOwnProperty("inputType")&&!p.isString(e.inputType))return"inputType: string expected";if(null!=e.outputType&&e.hasOwnProperty("outputType")&&!p.isString(e.outputType))return"outputType: string expected";if(null!=e.options&&e.hasOwnProperty("options")){var t=l.google.protobuf.MethodOptions.verify(e.options);if(t)return"options."+t}return null!=e.clientStreaming&&e.hasOwnProperty("clientStreaming")&&"boolean"!=typeof e.clientStreaming?"clientStreaming: boolean expected":null!=e.serverStreaming&&e.hasOwnProperty("serverStreaming")&&"boolean"!=typeof e.serverStreaming?"serverStreaming: boolean expected":null},x.fromObject=function(e){if(e instanceof l.google.protobuf.MethodDescriptorProto)return e;var t=new l.google.protobuf.MethodDescriptorProto;if(null!=e.name&&(t.name=String(e.name)),null!=e.inputType&&(t.inputType=String(e.inputType)),null!=e.outputType&&(t.outputType=String(e.outputType)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.MethodDescriptorProto.options: object expected");t.options=l.google.protobuf.MethodOptions.fromObject(e.options)}return null!=e.clientStreaming&&(t.clientStreaming=Boolean(e.clientStreaming)),null!=e.serverStreaming&&(t.serverStreaming=Boolean(e.serverStreaming)),t},x.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.inputType="",n.outputType="",n.options=null,n.clientStreaming=!1,n.serverStreaming=!1),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.inputType&&e.hasOwnProperty("inputType")&&(n.inputType=e.inputType),null!=e.outputType&&e.hasOwnProperty("outputType")&&(n.outputType=e.outputType),null!=e.options&&e.hasOwnProperty("options")&&(n.options=l.google.protobuf.MethodOptions.toObject(e.options,t)),null!=e.clientStreaming&&e.hasOwnProperty("clientStreaming")&&(n.clientStreaming=e.clientStreaming),null!=e.serverStreaming&&e.hasOwnProperty("serverStreaming")&&(n.serverStreaming=e.serverStreaming),n},x.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},x),n.FileOptions=(S.prototype.javaPackage="",S.prototype.javaOuterClassname="",S.prototype.javaMultipleFiles=!1,S.prototype.javaGenerateEqualsAndHash=!1,S.prototype.javaStringCheckUtf8=!1,S.prototype.optimizeFor=1,S.prototype.goPackage="",S.prototype.ccGenericServices=!1,S.prototype.javaGenericServices=!1,S.prototype.pyGenericServices=!1,S.prototype.phpGenericServices=!1,S.prototype.deprecated=!1,S.prototype.ccEnableArenas=!0,S.prototype.objcClassPrefix="",S.prototype.csharpNamespace="",S.prototype.swiftPrefix="",S.prototype.phpClassPrefix="",S.prototype.phpNamespace="",S.prototype.phpMetadataNamespace="",S.prototype.rubyPackage="",S.prototype.uninterpretedOption=p.emptyArray,S.prototype[".google.api.resourceDefinition"]=p.emptyArray,S.create=function(e){return new 
S(e)},S.encode=function(e,t){if(t=t||i.create(),null!=e.javaPackage&&Object.hasOwnProperty.call(e,"javaPackage")&&t.uint32(10).string(e.javaPackage),null!=e.javaOuterClassname&&Object.hasOwnProperty.call(e,"javaOuterClassname")&&t.uint32(66).string(e.javaOuterClassname),null!=e.optimizeFor&&Object.hasOwnProperty.call(e,"optimizeFor")&&t.uint32(72).int32(e.optimizeFor),null!=e.javaMultipleFiles&&Object.hasOwnProperty.call(e,"javaMultipleFiles")&&t.uint32(80).bool(e.javaMultipleFiles),null!=e.goPackage&&Object.hasOwnProperty.call(e,"goPackage")&&t.uint32(90).string(e.goPackage),null!=e.ccGenericServices&&Object.hasOwnProperty.call(e,"ccGenericServices")&&t.uint32(128).bool(e.ccGenericServices),null!=e.javaGenericServices&&Object.hasOwnProperty.call(e,"javaGenericServices")&&t.uint32(136).bool(e.javaGenericServices),null!=e.pyGenericServices&&Object.hasOwnProperty.call(e,"pyGenericServices")&&t.uint32(144).bool(e.pyGenericServices),null!=e.javaGenerateEqualsAndHash&&Object.hasOwnProperty.call(e,"javaGenerateEqualsAndHash")&&t.uint32(160).bool(e.javaGenerateEqualsAndHash),null!=e.deprecated&&Object.hasOwnProperty.call(e,"deprecated")&&t.uint32(184).bool(e.deprecated),null!=e.javaStringCheckUtf8&&Object.hasOwnProperty.call(e,"javaStringCheckUtf8")&&t.uint32(216).bool(e.javaStringCheckUtf8),null!=e.ccEnableArenas&&Object.hasOwnProperty.call(e,"ccEnableArenas")&&t.uint32(248).bool(e.ccEnableArenas),null!=e.objcClassPrefix&&Object.hasOwnProperty.call(e,"objcClassPrefix")&&t.uint32(290).string(e.objcClassPrefix),null!=e.csharpNamespace&&Object.hasOwnProperty.call(e,"csharpNamespace")&&t.uint32(298).string(e.csharpNamespace),null!=e.swiftPrefix&&Object.hasOwnProperty.call(e,"swiftPrefix")&&t.uint32(314).string(e.swiftPrefix),null!=e.phpClassPrefix&&Object.hasOwnProperty.call(e,"phpClassPrefix")&&t.uint32(322).string(e.phpClassPrefix),null!=e.phpNamespace&&Object.hasOwnProperty.call(e,"phpNamespace")&&t.uint32(330).string(e.phpNamespace),null!=e.phpGenericServices&&Object.hasOwnProperty.call(e,"phpGenericServices")&&t.uint32(336).bool(e.phpGenericServices),null!=e.phpMetadataNamespace&&Object.hasOwnProperty.call(e,"phpMetadataNamespace")&&t.uint32(354).string(e.phpMetadataNamespace),null!=e.rubyPackage&&Object.hasOwnProperty.call(e,"rubyPackage")&&t.uint32(362).string(e.rubyPackage),null!=e.uninterpretedOption&&e.uninterpretedOption.length)for(var n=0;n>>3){case 1:o.javaPackage=e.string();break;case 8:o.javaOuterClassname=e.string();break;case 10:o.javaMultipleFiles=e.bool();break;case 20:o.javaGenerateEqualsAndHash=e.bool();break;case 27:o.javaStringCheckUtf8=e.bool();break;case 9:o.optimizeFor=e.int32();break;case 11:o.goPackage=e.string();break;case 16:o.ccGenericServices=e.bool();break;case 17:o.javaGenericServices=e.bool();break;case 18:o.pyGenericServices=e.bool();break;case 42:o.phpGenericServices=e.bool();break;case 23:o.deprecated=e.bool();break;case 31:o.ccEnableArenas=e.bool();break;case 36:o.objcClassPrefix=e.string();break;case 37:o.csharpNamespace=e.string();break;case 39:o.swiftPrefix=e.string();break;case 40:o.phpClassPrefix=e.string();break;case 41:o.phpNamespace=e.string();break;case 44:o.phpMetadataNamespace=e.string();break;case 45:o.rubyPackage=e.string();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(l.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;case 
1053:o[".google.api.resourceDefinition"]&&o[".google.api.resourceDefinition"].length||(o[".google.api.resourceDefinition"]=[]),o[".google.api.resourceDefinition"].push(l.google.api.ResourceDescriptor.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},S.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},S.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.javaPackage&&e.hasOwnProperty("javaPackage")&&!p.isString(e.javaPackage))return"javaPackage: string expected";if(null!=e.javaOuterClassname&&e.hasOwnProperty("javaOuterClassname")&&!p.isString(e.javaOuterClassname))return"javaOuterClassname: string expected";if(null!=e.javaMultipleFiles&&e.hasOwnProperty("javaMultipleFiles")&&"boolean"!=typeof e.javaMultipleFiles)return"javaMultipleFiles: boolean expected";if(null!=e.javaGenerateEqualsAndHash&&e.hasOwnProperty("javaGenerateEqualsAndHash")&&"boolean"!=typeof e.javaGenerateEqualsAndHash)return"javaGenerateEqualsAndHash: boolean expected";if(null!=e.javaStringCheckUtf8&&e.hasOwnProperty("javaStringCheckUtf8")&&"boolean"!=typeof e.javaStringCheckUtf8)return"javaStringCheckUtf8: boolean expected";if(null!=e.optimizeFor&&e.hasOwnProperty("optimizeFor"))switch(e.optimizeFor){default:return"optimizeFor: enum value expected";case 1:case 2:case 3:}if(null!=e.goPackage&&e.hasOwnProperty("goPackage")&&!p.isString(e.goPackage))return"goPackage: string expected";if(null!=e.ccGenericServices&&e.hasOwnProperty("ccGenericServices")&&"boolean"!=typeof e.ccGenericServices)return"ccGenericServices: boolean expected";if(null!=e.javaGenericServices&&e.hasOwnProperty("javaGenericServices")&&"boolean"!=typeof e.javaGenericServices)return"javaGenericServices: boolean expected";if(null!=e.pyGenericServices&&e.hasOwnProperty("pyGenericServices")&&"boolean"!=typeof e.pyGenericServices)return"pyGenericServices: boolean expected";if(null!=e.phpGenericServices&&e.hasOwnProperty("phpGenericServices")&&"boolean"!=typeof e.phpGenericServices)return"phpGenericServices: boolean expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.ccEnableArenas&&e.hasOwnProperty("ccEnableArenas")&&"boolean"!=typeof e.ccEnableArenas)return"ccEnableArenas: boolean expected";if(null!=e.objcClassPrefix&&e.hasOwnProperty("objcClassPrefix")&&!p.isString(e.objcClassPrefix))return"objcClassPrefix: string expected";if(null!=e.csharpNamespace&&e.hasOwnProperty("csharpNamespace")&&!p.isString(e.csharpNamespace))return"csharpNamespace: string expected";if(null!=e.swiftPrefix&&e.hasOwnProperty("swiftPrefix")&&!p.isString(e.swiftPrefix))return"swiftPrefix: string expected";if(null!=e.phpClassPrefix&&e.hasOwnProperty("phpClassPrefix")&&!p.isString(e.phpClassPrefix))return"phpClassPrefix: string expected";if(null!=e.phpNamespace&&e.hasOwnProperty("phpNamespace")&&!p.isString(e.phpNamespace))return"phpNamespace: string expected";if(null!=e.phpMetadataNamespace&&e.hasOwnProperty("phpMetadataNamespace")&&!p.isString(e.phpMetadataNamespace))return"phpMetadataNamespace: string expected";if(null!=e.rubyPackage&&e.hasOwnProperty("rubyPackage")&&!p.isString(e.rubyPackage))return"rubyPackage: string expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.messageSetWireFormat=e.bool();break;case 2:o.noStandardDescriptorAccessor=e.bool();break;case 
Compiled bundle hunks abridged. The remainder of this diff regenerates the action's minified ncc/webpack bundle. The abridged span finishes one generated protobufjs module (the standard `google.protobuf` descriptor and option types, extended with `google.api` annotations such as `resource`, `fieldBehavior`, `defaultHost`, `oauthScopes`, `http`, and `methodSignature`, plus `google.type.Expr`) and then swaps two webpack modules:

- module `197` is removed and module `22560` is added, registering `roots.locations_protos` with the `google.cloud.location` messages (`ListLocationsRequest`, `ListLocationsResponse`, `GetLocationRequest`, `Location`), the `google.api` HTTP types (`Http`, `HttpRule`, `CustomHttpPattern`), the `google.protobuf` descriptor and option types, and `google.protobuf.Any`;
- module `7545` is removed and module `59081` is added, registering `roots.operations_protos` with the `google.longrunning` messages (`Operation`, `GetOperationRequest`, `ListOperationsRequest`, `ListOperationsResponse`) and `google.rpc.Status`.

Both generated modules load `protobufjs/minimal` via `__nccwpck_require__(96916)` and expose the usual static API (`encode`, `decode`, `verify`, `fromObject`, `toObject`) for each message type; they are presumably pulled in by the newly added Google Cloud Storage and Firestore client dependencies.
t=0;t>>3==1?o.name=e.string():e.skipType(7&r)}return o},L.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},L.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name)?"name: string expected":null},L.fromObject=function(e){var t;return e instanceof p.google.longrunning.CancelOperationRequest?e:(t=new p.google.longrunning.CancelOperationRequest,null!=e.name&&(t.name=String(e.name)),t)},L.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name=""),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),n},L.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},L),t.DeleteOperationRequest=(U.prototype.name="",U.create=function(e){return new U(e)},U.encode=function(e,t){return t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),t},U.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},U.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new p.google.longrunning.DeleteOperationRequest;e.pos>>3==1?o.name=e.string():e.skipType(7&r)}return o},U.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},U.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name)?"name: string expected":null},U.fromObject=function(e){var t;return e instanceof p.google.longrunning.DeleteOperationRequest?e:(t=new p.google.longrunning.DeleteOperationRequest,null!=e.name&&(t.name=String(e.name)),t)},U.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name=""),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),n},U.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},U),t.WaitOperationRequest=(c.prototype.name="",c.prototype.timeout=null,c.create=function(e){return new c(e)},c.encode=function(e,t){return t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.timeout&&Object.hasOwnProperty.call(e,"timeout")&&p.google.protobuf.Duration.encode(e.timeout,t.uint32(18).fork()).ldelim(),t},c.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},c.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new p.google.longrunning.WaitOperationRequest;e.pos>>3){case 1:o.name=e.string();break;case 2:o.timeout=p.google.protobuf.Duration.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},c.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},c.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name))return"name: string expected";if(null!=e.timeout&&e.hasOwnProperty("timeout")){e=p.google.protobuf.Duration.verify(e.timeout);if(e)return"timeout."+e}return null},c.fromObject=function(e){if(e instanceof p.google.longrunning.WaitOperationRequest)return e;var t=new p.google.longrunning.WaitOperationRequest;if(null!=e.name&&(t.name=String(e.name)),null!=e.timeout){if("object"!=typeof e.timeout)throw TypeError(".google.longrunning.WaitOperationRequest.timeout: object expected");t.timeout=p.google.protobuf.Duration.fromObject(e.timeout)}return t},c.toObject=function(e,t){var 
n={};return(t=t||{}).defaults&&(n.name="",n.timeout=null),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.timeout&&e.hasOwnProperty("timeout")&&(n.timeout=p.google.protobuf.Duration.toObject(e.timeout,t)),n},c.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},c),t.OperationInfo=(d.prototype.responseType="",d.prototype.metadataType="",d.create=function(e){return new d(e)},d.encode=function(e,t){return t=t||r.create(),null!=e.responseType&&Object.hasOwnProperty.call(e,"responseType")&&t.uint32(10).string(e.responseType),null!=e.metadataType&&Object.hasOwnProperty.call(e,"metadataType")&&t.uint32(18).string(e.metadataType),t},d.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},d.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new p.google.longrunning.OperationInfo;e.pos>>3){case 1:o.responseType=e.string();break;case 2:o.metadataType=e.string();break;default:e.skipType(7&r)}}return o},d.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},d.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.responseType&&e.hasOwnProperty("responseType")&&!i.isString(e.responseType)?"responseType: string expected":null!=e.metadataType&&e.hasOwnProperty("metadataType")&&!i.isString(e.metadataType)?"metadataType: string expected":null},d.fromObject=function(e){var t;return e instanceof p.google.longrunning.OperationInfo?e:(t=new p.google.longrunning.OperationInfo,null!=e.responseType&&(t.responseType=String(e.responseType)),null!=e.metadataType&&(t.metadataType=String(e.metadataType)),t)},d.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.responseType="",n.metadataType=""),null!=e.responseType&&e.hasOwnProperty("responseType")&&(n.responseType=e.responseType),null!=e.metadataType&&e.hasOwnProperty("metadataType")&&(n.metadataType=e.metadataType),n},d.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},d),t),F.api=((n={}).Http=(g.prototype.rules=i.emptyArray,g.prototype.fullyDecodeReservedExpansion=!1,g.create=function(e){return new g(e)},g.encode=function(e,t){if(t=t||r.create(),null!=e.rules&&e.rules.length)for(var n=0;n>>3){case 1:o.rules&&o.rules.length||(o.rules=[]),o.rules.push(p.google.api.HttpRule.decode(e,e.uint32()));break;case 2:o.fullyDecodeReservedExpansion=e.bool();break;default:e.skipType(7&r)}}return o},g.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},g.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.rules&&e.hasOwnProperty("rules")){if(!Array.isArray(e.rules))return"rules: array expected";for(var t=0;t>>3){case 1:o.selector=e.string();break;case 2:o.get=e.string();break;case 3:o.put=e.string();break;case 4:o.post=e.string();break;case 5:o.delete=e.string();break;case 6:o.patch=e.string();break;case 8:o.custom=p.google.api.CustomHttpPattern.decode(e,e.uint32());break;case 7:o.body=e.string();break;case 12:o.responseBody=e.string();break;case 11:o.additionalBindings&&o.additionalBindings.length||(o.additionalBindings=[]),o.additionalBindings.push(p.google.api.HttpRule.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},f.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},f.verify=function(e){if("object"!=typeof e||null===e)return"object expected";var t={};if(null!=e.selector&&e.hasOwnProperty("selector")&&!i.isString(e.selector))return"selector: 
string expected";if(null!=e.get&&e.hasOwnProperty("get")&&(t.pattern=1,!i.isString(e.get)))return"get: string expected";if(null!=e.put&&e.hasOwnProperty("put")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!i.isString(e.put))return"put: string expected"}if(null!=e.post&&e.hasOwnProperty("post")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!i.isString(e.post))return"post: string expected"}if(null!=e.delete&&e.hasOwnProperty("delete")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!i.isString(e.delete))return"delete: string expected"}if(null!=e.patch&&e.hasOwnProperty("patch")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!i.isString(e.patch))return"patch: string expected"}if(null!=e.custom&&e.hasOwnProperty("custom")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,n=p.google.api.CustomHttpPattern.verify(e.custom))return"custom."+n}if(null!=e.body&&e.hasOwnProperty("body")&&!i.isString(e.body))return"body: string expected";if(null!=e.responseBody&&e.hasOwnProperty("responseBody")&&!i.isString(e.responseBody))return"responseBody: string expected";if(null!=e.additionalBindings&&e.hasOwnProperty("additionalBindings")){if(!Array.isArray(e.additionalBindings))return"additionalBindings: array expected";for(var n,o=0;o>>3){case 1:o.kind=e.string();break;case 2:o.path=e.string();break;default:e.skipType(7&r)}}return o},y.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},y.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.kind&&e.hasOwnProperty("kind")&&!i.isString(e.kind)?"kind: string expected":null!=e.path&&e.hasOwnProperty("path")&&!i.isString(e.path)?"path: string expected":null},y.fromObject=function(e){var t;return e instanceof p.google.api.CustomHttpPattern?e:(t=new p.google.api.CustomHttpPattern,null!=e.kind&&(t.kind=String(e.kind)),null!=e.path&&(t.path=String(e.path)),t)},y.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.kind="",n.path=""),null!=e.kind&&e.hasOwnProperty("kind")&&(n.kind=e.kind),null!=e.path&&e.hasOwnProperty("path")&&(n.path=e.path),n},y.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},y),n),F.protobuf=((t={}).FileDescriptorSet=(J.prototype.file=i.emptyArray,J.create=function(e){return new J(e)},J.encode=function(e,t){if(t=t||r.create(),null!=e.file&&e.file.length)for(var n=0;n>>3==1?(o.file&&o.file.length||(o.file=[]),o.file.push(p.google.protobuf.FileDescriptorProto.decode(e,e.uint32()))):e.skipType(7&r)}return o},J.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},J.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.file&&e.hasOwnProperty("file")){if(!Array.isArray(e.file))return"file: array expected";for(var t=0;t>>3){case 1:o.name=e.string();break;case 2:o.package=e.string();break;case 3:o.dependency&&o.dependency.length||(o.dependency=[]),o.dependency.push(e.string());break;case 10:if(o.publicDependency&&o.publicDependency.length||(o.publicDependency=[]),2==(7&r))for(var i=e.uint32()+e.pos;e.pos>>3){case 1:o.name=e.string();break;case 2:o.field&&o.field.length||(o.field=[]),o.field.push(p.google.protobuf.FieldDescriptorProto.decode(e,e.uint32()));break;case 6:o.extension&&o.extension.length||(o.extension=[]),o.extension.push(p.google.protobuf.FieldDescriptorProto.decode(e,e.uint32()));break;case 
3:o.nestedType&&o.nestedType.length||(o.nestedType=[]),o.nestedType.push(p.google.protobuf.DescriptorProto.decode(e,e.uint32()));break;case 4:o.enumType&&o.enumType.length||(o.enumType=[]),o.enumType.push(p.google.protobuf.EnumDescriptorProto.decode(e,e.uint32()));break;case 5:o.extensionRange&&o.extensionRange.length||(o.extensionRange=[]),o.extensionRange.push(p.google.protobuf.DescriptorProto.ExtensionRange.decode(e,e.uint32()));break;case 8:o.oneofDecl&&o.oneofDecl.length||(o.oneofDecl=[]),o.oneofDecl.push(p.google.protobuf.OneofDescriptorProto.decode(e,e.uint32()));break;case 7:o.options=p.google.protobuf.MessageOptions.decode(e,e.uint32());break;case 9:o.reservedRange&&o.reservedRange.length||(o.reservedRange=[]),o.reservedRange.push(p.google.protobuf.DescriptorProto.ReservedRange.decode(e,e.uint32()));break;case 10:o.reservedName&&o.reservedName.length||(o.reservedName=[]),o.reservedName.push(e.string());break;default:e.skipType(7&r)}}return o},O.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},O.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name))return"name: string expected";if(null!=e.field&&e.hasOwnProperty("field")){if(!Array.isArray(e.field))return"field: array expected";for(var t=0;t>>3){case 1:o.start=e.int32();break;case 2:o.end=e.int32();break;case 3:o.options=p.google.protobuf.ExtensionRangeOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},b.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},b.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.start&&e.hasOwnProperty("start")&&!i.isInteger(e.start))return"start: integer expected";if(null!=e.end&&e.hasOwnProperty("end")&&!i.isInteger(e.end))return"end: integer expected";if(null!=e.options&&e.hasOwnProperty("options")){e=p.google.protobuf.ExtensionRangeOptions.verify(e.options);if(e)return"options."+e}return null},b.fromObject=function(e){if(e instanceof p.google.protobuf.DescriptorProto.ExtensionRange)return e;var t=new p.google.protobuf.DescriptorProto.ExtensionRange;if(null!=e.start&&(t.start=0|e.start),null!=e.end&&(t.end=0|e.end),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.DescriptorProto.ExtensionRange.options: object expected");t.options=p.google.protobuf.ExtensionRangeOptions.fromObject(e.options)}return t},b.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.start=0,n.end=0,n.options=null),null!=e.start&&e.hasOwnProperty("start")&&(n.start=e.start),null!=e.end&&e.hasOwnProperty("end")&&(n.end=e.end),null!=e.options&&e.hasOwnProperty("options")&&(n.options=p.google.protobuf.ExtensionRangeOptions.toObject(e.options,t)),n},b.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},b),O.ReservedRange=(m.prototype.start=0,m.prototype.end=0,m.create=function(e){return new m(e)},m.encode=function(e,t){return t=t||r.create(),null!=e.start&&Object.hasOwnProperty.call(e,"start")&&t.uint32(8).int32(e.start),null!=e.end&&Object.hasOwnProperty.call(e,"end")&&t.uint32(16).int32(e.end),t},m.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},m.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new p.google.protobuf.DescriptorProto.ReservedRange;e.pos>>3){case 1:o.start=e.int32();break;case 2:o.end=e.int32();break;default:e.skipType(7&r)}}return o},m.decodeDelimited=function(e){return 
e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},m.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.start&&e.hasOwnProperty("start")&&!i.isInteger(e.start)?"start: integer expected":null!=e.end&&e.hasOwnProperty("end")&&!i.isInteger(e.end)?"end: integer expected":null},m.fromObject=function(e){var t;return e instanceof p.google.protobuf.DescriptorProto.ReservedRange?e:(t=new p.google.protobuf.DescriptorProto.ReservedRange,null!=e.start&&(t.start=0|e.start),null!=e.end&&(t.end=0|e.end),t)},m.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.start=0,n.end=0),null!=e.start&&e.hasOwnProperty("start")&&(n.start=e.start),null!=e.end&&e.hasOwnProperty("end")&&(n.end=e.end),n},m.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},m),O),t.ExtensionRangeOptions=(M.prototype.uninterpretedOption=i.emptyArray,M.create=function(e){return new M(e)},M.encode=function(e,t){if(t=t||r.create(),null!=e.uninterpretedOption&&e.uninterpretedOption.length)for(var n=0;n>>3==999?(o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(p.google.protobuf.UninterpretedOption.decode(e,e.uint32()))):e.skipType(7&r)}return o},M.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},M.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.name=e.string();break;case 3:o.number=e.int32();break;case 4:o.label=e.int32();break;case 5:o.type=e.int32();break;case 6:o.typeName=e.string();break;case 2:o.extendee=e.string();break;case 7:o.defaultValue=e.string();break;case 9:o.oneofIndex=e.int32();break;case 10:o.jsonName=e.string();break;case 8:o.options=p.google.protobuf.FieldOptions.decode(e,e.uint32());break;case 17:o.proto3Optional=e.bool();break;default:e.skipType(7&r)}}return o},v.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},v.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name))return"name: string expected";if(null!=e.number&&e.hasOwnProperty("number")&&!i.isInteger(e.number))return"number: integer expected";if(null!=e.label&&e.hasOwnProperty("label"))switch(e.label){default:return"label: enum value expected";case 1:case 2:case 3:}if(null!=e.type&&e.hasOwnProperty("type"))switch(e.type){default:return"type: enum value expected";case 1:case 2:case 3:case 4:case 5:case 6:case 7:case 8:case 9:case 10:case 11:case 12:case 13:case 14:case 15:case 16:case 17:case 18:}if(null!=e.typeName&&e.hasOwnProperty("typeName")&&!i.isString(e.typeName))return"typeName: string expected";if(null!=e.extendee&&e.hasOwnProperty("extendee")&&!i.isString(e.extendee))return"extendee: string expected";if(null!=e.defaultValue&&e.hasOwnProperty("defaultValue")&&!i.isString(e.defaultValue))return"defaultValue: string expected";if(null!=e.oneofIndex&&e.hasOwnProperty("oneofIndex")&&!i.isInteger(e.oneofIndex))return"oneofIndex: integer expected";if(null!=e.jsonName&&e.hasOwnProperty("jsonName")&&!i.isString(e.jsonName))return"jsonName: string expected";if(null!=e.options&&e.hasOwnProperty("options")){var t=p.google.protobuf.FieldOptions.verify(e.options);if(t)return"options."+t}return 
null!=e.proto3Optional&&e.hasOwnProperty("proto3Optional")&&"boolean"!=typeof e.proto3Optional?"proto3Optional: boolean expected":null},v.fromObject=function(e){if(e instanceof p.google.protobuf.FieldDescriptorProto)return e;var t=new p.google.protobuf.FieldDescriptorProto;switch(null!=e.name&&(t.name=String(e.name)),null!=e.number&&(t.number=0|e.number),e.label){case"LABEL_OPTIONAL":case 1:t.label=1;break;case"LABEL_REQUIRED":case 2:t.label=2;break;case"LABEL_REPEATED":case 3:t.label=3}switch(e.type){case"TYPE_DOUBLE":case 1:t.type=1;break;case"TYPE_FLOAT":case 2:t.type=2;break;case"TYPE_INT64":case 3:t.type=3;break;case"TYPE_UINT64":case 4:t.type=4;break;case"TYPE_INT32":case 5:t.type=5;break;case"TYPE_FIXED64":case 6:t.type=6;break;case"TYPE_FIXED32":case 7:t.type=7;break;case"TYPE_BOOL":case 8:t.type=8;break;case"TYPE_STRING":case 9:t.type=9;break;case"TYPE_GROUP":case 10:t.type=10;break;case"TYPE_MESSAGE":case 11:t.type=11;break;case"TYPE_BYTES":case 12:t.type=12;break;case"TYPE_UINT32":case 13:t.type=13;break;case"TYPE_ENUM":case 14:t.type=14;break;case"TYPE_SFIXED32":case 15:t.type=15;break;case"TYPE_SFIXED64":case 16:t.type=16;break;case"TYPE_SINT32":case 17:t.type=17;break;case"TYPE_SINT64":case 18:t.type=18}if(null!=e.typeName&&(t.typeName=String(e.typeName)),null!=e.extendee&&(t.extendee=String(e.extendee)),null!=e.defaultValue&&(t.defaultValue=String(e.defaultValue)),null!=e.oneofIndex&&(t.oneofIndex=0|e.oneofIndex),null!=e.jsonName&&(t.jsonName=String(e.jsonName)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.FieldDescriptorProto.options: object expected");t.options=p.google.protobuf.FieldOptions.fromObject(e.options)}return null!=e.proto3Optional&&(t.proto3Optional=Boolean(e.proto3Optional)),t},v.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.extendee="",n.number=0,n.label=t.enums===String?"LABEL_OPTIONAL":1,n.type=t.enums===String?"TYPE_DOUBLE":1,n.typeName="",n.defaultValue="",n.options=null,n.oneofIndex=0,n.jsonName="",n.proto3Optional=!1),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.extendee&&e.hasOwnProperty("extendee")&&(n.extendee=e.extendee),null!=e.number&&e.hasOwnProperty("number")&&(n.number=e.number),null!=e.label&&e.hasOwnProperty("label")&&(n.label=t.enums===String?p.google.protobuf.FieldDescriptorProto.Label[e.label]:e.label),null!=e.type&&e.hasOwnProperty("type")&&(n.type=t.enums===String?p.google.protobuf.FieldDescriptorProto.Type[e.type]:e.type),null!=e.typeName&&e.hasOwnProperty("typeName")&&(n.typeName=e.typeName),null!=e.defaultValue&&e.hasOwnProperty("defaultValue")&&(n.defaultValue=e.defaultValue),null!=e.options&&e.hasOwnProperty("options")&&(n.options=p.google.protobuf.FieldOptions.toObject(e.options,t)),null!=e.oneofIndex&&e.hasOwnProperty("oneofIndex")&&(n.oneofIndex=e.oneofIndex),null!=e.jsonName&&e.hasOwnProperty("jsonName")&&(n.jsonName=e.jsonName),null!=e.proto3Optional&&e.hasOwnProperty("proto3Optional")&&(n.proto3Optional=e.proto3Optional),n},v.prototype.toJSON=function(){return 
this.constructor.toObject(this,o.util.toJSONOptions)},v.Type=(n={},(e=Object.create(n))[n[1]="TYPE_DOUBLE"]=1,e[n[2]="TYPE_FLOAT"]=2,e[n[3]="TYPE_INT64"]=3,e[n[4]="TYPE_UINT64"]=4,e[n[5]="TYPE_INT32"]=5,e[n[6]="TYPE_FIXED64"]=6,e[n[7]="TYPE_FIXED32"]=7,e[n[8]="TYPE_BOOL"]=8,e[n[9]="TYPE_STRING"]=9,e[n[10]="TYPE_GROUP"]=10,e[n[11]="TYPE_MESSAGE"]=11,e[n[12]="TYPE_BYTES"]=12,e[n[13]="TYPE_UINT32"]=13,e[n[14]="TYPE_ENUM"]=14,e[n[15]="TYPE_SFIXED32"]=15,e[n[16]="TYPE_SFIXED64"]=16,e[n[17]="TYPE_SINT32"]=17,e[n[18]="TYPE_SINT64"]=18,e),v.Label=(n={},(e=Object.create(n))[n[1]="LABEL_OPTIONAL"]=1,e[n[2]="LABEL_REQUIRED"]=2,e[n[3]="LABEL_REPEATED"]=3,e),v),t.OneofDescriptorProto=(w.prototype.name="",w.prototype.options=null,w.create=function(e){return new w(e)},w.encode=function(e,t){return t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.options&&Object.hasOwnProperty.call(e,"options")&&p.google.protobuf.OneofOptions.encode(e.options,t.uint32(18).fork()).ldelim(),t},w.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},w.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new p.google.protobuf.OneofDescriptorProto;e.pos>>3){case 1:o.name=e.string();break;case 2:o.options=p.google.protobuf.OneofOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},w.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},w.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name))return"name: string expected";if(null!=e.options&&e.hasOwnProperty("options")){e=p.google.protobuf.OneofOptions.verify(e.options);if(e)return"options."+e}return null},w.fromObject=function(e){if(e instanceof p.google.protobuf.OneofDescriptorProto)return e;var t=new p.google.protobuf.OneofDescriptorProto;if(null!=e.name&&(t.name=String(e.name)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.OneofDescriptorProto.options: object expected");t.options=p.google.protobuf.OneofOptions.fromObject(e.options)}return t},w.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.options=null),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.options&&e.hasOwnProperty("options")&&(n.options=p.google.protobuf.OneofOptions.toObject(e.options,t)),n},w.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},w),t.EnumDescriptorProto=(P.prototype.name="",P.prototype.value=i.emptyArray,P.prototype.options=null,P.prototype.reservedRange=i.emptyArray,P.prototype.reservedName=i.emptyArray,P.create=function(e){return new P(e)},P.encode=function(e,t){if(t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.value&&e.value.length)for(var n=0;n>>3){case 1:o.name=e.string();break;case 2:o.value&&o.value.length||(o.value=[]),o.value.push(p.google.protobuf.EnumValueDescriptorProto.decode(e,e.uint32()));break;case 3:o.options=p.google.protobuf.EnumOptions.decode(e,e.uint32());break;case 4:o.reservedRange&&o.reservedRange.length||(o.reservedRange=[]),o.reservedRange.push(p.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(e,e.uint32()));break;case 5:o.reservedName&&o.reservedName.length||(o.reservedName=[]),o.reservedName.push(e.string());break;default:e.skipType(7&r)}}return o},P.decodeDelimited=function(e){return e instanceof a||(e=new 
a(e)),this.decode(e,e.uint32())},P.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name))return"name: string expected";if(null!=e.value&&e.hasOwnProperty("value")){if(!Array.isArray(e.value))return"value: array expected";for(var t=0;t>>3){case 1:o.start=e.int32();break;case 2:o.end=e.int32();break;default:e.skipType(7&r)}}return o},_.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},_.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.start&&e.hasOwnProperty("start")&&!i.isInteger(e.start)?"start: integer expected":null!=e.end&&e.hasOwnProperty("end")&&!i.isInteger(e.end)?"end: integer expected":null},_.fromObject=function(e){var t;return e instanceof p.google.protobuf.EnumDescriptorProto.EnumReservedRange?e:(t=new p.google.protobuf.EnumDescriptorProto.EnumReservedRange,null!=e.start&&(t.start=0|e.start),null!=e.end&&(t.end=0|e.end),t)},_.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.start=0,n.end=0),null!=e.start&&e.hasOwnProperty("start")&&(n.start=e.start),null!=e.end&&e.hasOwnProperty("end")&&(n.end=e.end),n},_.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},_),P),t.EnumValueDescriptorProto=(j.prototype.name="",j.prototype.number=0,j.prototype.options=null,j.create=function(e){return new j(e)},j.encode=function(e,t){return t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.number&&Object.hasOwnProperty.call(e,"number")&&t.uint32(16).int32(e.number),null!=e.options&&Object.hasOwnProperty.call(e,"options")&&p.google.protobuf.EnumValueOptions.encode(e.options,t.uint32(26).fork()).ldelim(),t},j.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},j.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new p.google.protobuf.EnumValueDescriptorProto;e.pos>>3){case 1:o.name=e.string();break;case 2:o.number=e.int32();break;case 3:o.options=p.google.protobuf.EnumValueOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},j.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},j.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name))return"name: string expected";if(null!=e.number&&e.hasOwnProperty("number")&&!i.isInteger(e.number))return"number: integer expected";if(null!=e.options&&e.hasOwnProperty("options")){e=p.google.protobuf.EnumValueOptions.verify(e.options);if(e)return"options."+e}return null},j.fromObject=function(e){if(e instanceof p.google.protobuf.EnumValueDescriptorProto)return e;var t=new p.google.protobuf.EnumValueDescriptorProto;if(null!=e.name&&(t.name=String(e.name)),null!=e.number&&(t.number=0|e.number),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.EnumValueDescriptorProto.options: object expected");t.options=p.google.protobuf.EnumValueOptions.fromObject(e.options)}return t},j.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.number=0,n.options=null),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.number&&e.hasOwnProperty("number")&&(n.number=e.number),null!=e.options&&e.hasOwnProperty("options")&&(n.options=p.google.protobuf.EnumValueOptions.toObject(e.options,t)),n},j.prototype.toJSON=function(){return 
this.constructor.toObject(this,o.util.toJSONOptions)},j),t.ServiceDescriptorProto=(S.prototype.name="",S.prototype.method=i.emptyArray,S.prototype.options=null,S.create=function(e){return new S(e)},S.encode=function(e,t){if(t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.method&&e.method.length)for(var n=0;n>>3){case 1:o.name=e.string();break;case 2:o.method&&o.method.length||(o.method=[]),o.method.push(p.google.protobuf.MethodDescriptorProto.decode(e,e.uint32()));break;case 3:o.options=p.google.protobuf.ServiceOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},S.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},S.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name))return"name: string expected";if(null!=e.method&&e.hasOwnProperty("method")){if(!Array.isArray(e.method))return"method: array expected";for(var t=0;t>>3){case 1:o.name=e.string();break;case 2:o.inputType=e.string();break;case 3:o.outputType=e.string();break;case 4:o.options=p.google.protobuf.MethodOptions.decode(e,e.uint32());break;case 5:o.clientStreaming=e.bool();break;case 6:o.serverStreaming=e.bool();break;default:e.skipType(7&r)}}return o},x.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},x.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name))return"name: string expected";if(null!=e.inputType&&e.hasOwnProperty("inputType")&&!i.isString(e.inputType))return"inputType: string expected";if(null!=e.outputType&&e.hasOwnProperty("outputType")&&!i.isString(e.outputType))return"outputType: string expected";if(null!=e.options&&e.hasOwnProperty("options")){var t=p.google.protobuf.MethodOptions.verify(e.options);if(t)return"options."+t}return null!=e.clientStreaming&&e.hasOwnProperty("clientStreaming")&&"boolean"!=typeof e.clientStreaming?"clientStreaming: boolean expected":null!=e.serverStreaming&&e.hasOwnProperty("serverStreaming")&&"boolean"!=typeof e.serverStreaming?"serverStreaming: boolean expected":null},x.fromObject=function(e){if(e instanceof p.google.protobuf.MethodDescriptorProto)return e;var t=new p.google.protobuf.MethodDescriptorProto;if(null!=e.name&&(t.name=String(e.name)),null!=e.inputType&&(t.inputType=String(e.inputType)),null!=e.outputType&&(t.outputType=String(e.outputType)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.MethodDescriptorProto.options: object expected");t.options=p.google.protobuf.MethodOptions.fromObject(e.options)}return null!=e.clientStreaming&&(t.clientStreaming=Boolean(e.clientStreaming)),null!=e.serverStreaming&&(t.serverStreaming=Boolean(e.serverStreaming)),t},x.toObject=function(e,t){var 
n={};return(t=t||{}).defaults&&(n.name="",n.inputType="",n.outputType="",n.options=null,n.clientStreaming=!1,n.serverStreaming=!1),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.inputType&&e.hasOwnProperty("inputType")&&(n.inputType=e.inputType),null!=e.outputType&&e.hasOwnProperty("outputType")&&(n.outputType=e.outputType),null!=e.options&&e.hasOwnProperty("options")&&(n.options=p.google.protobuf.MethodOptions.toObject(e.options,t)),null!=e.clientStreaming&&e.hasOwnProperty("clientStreaming")&&(n.clientStreaming=e.clientStreaming),null!=e.serverStreaming&&e.hasOwnProperty("serverStreaming")&&(n.serverStreaming=e.serverStreaming),n},x.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},x),t.FileOptions=(k.prototype.javaPackage="",k.prototype.javaOuterClassname="",k.prototype.javaMultipleFiles=!1,k.prototype.javaGenerateEqualsAndHash=!1,k.prototype.javaStringCheckUtf8=!1,k.prototype.optimizeFor=1,k.prototype.goPackage="",k.prototype.ccGenericServices=!1,k.prototype.javaGenericServices=!1,k.prototype.pyGenericServices=!1,k.prototype.phpGenericServices=!1,k.prototype.deprecated=!1,k.prototype.ccEnableArenas=!0,k.prototype.objcClassPrefix="",k.prototype.csharpNamespace="",k.prototype.swiftPrefix="",k.prototype.phpClassPrefix="",k.prototype.phpNamespace="",k.prototype.phpMetadataNamespace="",k.prototype.rubyPackage="",k.prototype.uninterpretedOption=i.emptyArray,k.create=function(e){return new k(e)},k.encode=function(e,t){if(t=t||r.create(),null!=e.javaPackage&&Object.hasOwnProperty.call(e,"javaPackage")&&t.uint32(10).string(e.javaPackage),null!=e.javaOuterClassname&&Object.hasOwnProperty.call(e,"javaOuterClassname")&&t.uint32(66).string(e.javaOuterClassname),null!=e.optimizeFor&&Object.hasOwnProperty.call(e,"optimizeFor")&&t.uint32(72).int32(e.optimizeFor),null!=e.javaMultipleFiles&&Object.hasOwnProperty.call(e,"javaMultipleFiles")&&t.uint32(80).bool(e.javaMultipleFiles),null!=e.goPackage&&Object.hasOwnProperty.call(e,"goPackage")&&t.uint32(90).string(e.goPackage),null!=e.ccGenericServices&&Object.hasOwnProperty.call(e,"ccGenericServices")&&t.uint32(128).bool(e.ccGenericServices),null!=e.javaGenericServices&&Object.hasOwnProperty.call(e,"javaGenericServices")&&t.uint32(136).bool(e.javaGenericServices),null!=e.pyGenericServices&&Object.hasOwnProperty.call(e,"pyGenericServices")&&t.uint32(144).bool(e.pyGenericServices),null!=e.javaGenerateEqualsAndHash&&Object.hasOwnProperty.call(e,"javaGenerateEqualsAndHash")&&t.uint32(160).bool(e.javaGenerateEqualsAndHash),null!=e.deprecated&&Object.hasOwnProperty.call(e,"deprecated")&&t.uint32(184).bool(e.deprecated),null!=e.javaStringCheckUtf8&&Object.hasOwnProperty.call(e,"javaStringCheckUtf8")&&t.uint32(216).bool(e.javaStringCheckUtf8),null!=e.ccEnableArenas&&Object.hasOwnProperty.call(e,"ccEnableArenas")&&t.uint32(248).bool(e.ccEnableArenas),null!=e.objcClassPrefix&&Object.hasOwnProperty.call(e,"objcClassPrefix")&&t.uint32(290).string(e.objcClassPrefix),null!=e.csharpNamespace&&Object.hasOwnProperty.call(e,"csharpNamespace")&&t.uint32(298).string(e.csharpNamespace),null!=e.swiftPrefix&&Object.hasOwnProperty.call(e,"swiftPrefix")&&t.uint32(314).string(e.swiftPrefix),null!=e.phpClassPrefix&&Object.hasOwnProperty.call(e,"phpClassPrefix")&&t.uint32(322).string(e.phpClassPrefix),null!=e.phpNamespace&&Object.hasOwnProperty.call(e,"phpNamespace")&&t.uint32(330).string(e.phpNamespace),null!=e.phpGenericServices&&Object.hasOwnProperty.call(e,"phpGenericServices")&&t.uint32(336).bool(e.phpGenericServices),null!=e
.phpMetadataNamespace&&Object.hasOwnProperty.call(e,"phpMetadataNamespace")&&t.uint32(354).string(e.phpMetadataNamespace),null!=e.rubyPackage&&Object.hasOwnProperty.call(e,"rubyPackage")&&t.uint32(362).string(e.rubyPackage),null!=e.uninterpretedOption&&e.uninterpretedOption.length)for(var n=0;n>>3){case 1:o.javaPackage=e.string();break;case 8:o.javaOuterClassname=e.string();break;case 10:o.javaMultipleFiles=e.bool();break;case 20:o.javaGenerateEqualsAndHash=e.bool();break;case 27:o.javaStringCheckUtf8=e.bool();break;case 9:o.optimizeFor=e.int32();break;case 11:o.goPackage=e.string();break;case 16:o.ccGenericServices=e.bool();break;case 17:o.javaGenericServices=e.bool();break;case 18:o.pyGenericServices=e.bool();break;case 42:o.phpGenericServices=e.bool();break;case 23:o.deprecated=e.bool();break;case 31:o.ccEnableArenas=e.bool();break;case 36:o.objcClassPrefix=e.string();break;case 37:o.csharpNamespace=e.string();break;case 39:o.swiftPrefix=e.string();break;case 40:o.phpClassPrefix=e.string();break;case 41:o.phpNamespace=e.string();break;case 44:o.phpMetadataNamespace=e.string();break;case 45:o.rubyPackage=e.string();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(p.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},k.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},k.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.javaPackage&&e.hasOwnProperty("javaPackage")&&!i.isString(e.javaPackage))return"javaPackage: string expected";if(null!=e.javaOuterClassname&&e.hasOwnProperty("javaOuterClassname")&&!i.isString(e.javaOuterClassname))return"javaOuterClassname: string expected";if(null!=e.javaMultipleFiles&&e.hasOwnProperty("javaMultipleFiles")&&"boolean"!=typeof e.javaMultipleFiles)return"javaMultipleFiles: boolean expected";if(null!=e.javaGenerateEqualsAndHash&&e.hasOwnProperty("javaGenerateEqualsAndHash")&&"boolean"!=typeof e.javaGenerateEqualsAndHash)return"javaGenerateEqualsAndHash: boolean expected";if(null!=e.javaStringCheckUtf8&&e.hasOwnProperty("javaStringCheckUtf8")&&"boolean"!=typeof e.javaStringCheckUtf8)return"javaStringCheckUtf8: boolean expected";if(null!=e.optimizeFor&&e.hasOwnProperty("optimizeFor"))switch(e.optimizeFor){default:return"optimizeFor: enum value expected";case 1:case 2:case 3:}if(null!=e.goPackage&&e.hasOwnProperty("goPackage")&&!i.isString(e.goPackage))return"goPackage: string expected";if(null!=e.ccGenericServices&&e.hasOwnProperty("ccGenericServices")&&"boolean"!=typeof e.ccGenericServices)return"ccGenericServices: boolean expected";if(null!=e.javaGenericServices&&e.hasOwnProperty("javaGenericServices")&&"boolean"!=typeof e.javaGenericServices)return"javaGenericServices: boolean expected";if(null!=e.pyGenericServices&&e.hasOwnProperty("pyGenericServices")&&"boolean"!=typeof e.pyGenericServices)return"pyGenericServices: boolean expected";if(null!=e.phpGenericServices&&e.hasOwnProperty("phpGenericServices")&&"boolean"!=typeof e.phpGenericServices)return"phpGenericServices: boolean expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.ccEnableArenas&&e.hasOwnProperty("ccEnableArenas")&&"boolean"!=typeof e.ccEnableArenas)return"ccEnableArenas: boolean expected";if(null!=e.objcClassPrefix&&e.hasOwnProperty("objcClassPrefix")&&!i.isString(e.objcClassPrefix))return"objcClassPrefix: 
string expected";if(null!=e.csharpNamespace&&e.hasOwnProperty("csharpNamespace")&&!i.isString(e.csharpNamespace))return"csharpNamespace: string expected";if(null!=e.swiftPrefix&&e.hasOwnProperty("swiftPrefix")&&!i.isString(e.swiftPrefix))return"swiftPrefix: string expected";if(null!=e.phpClassPrefix&&e.hasOwnProperty("phpClassPrefix")&&!i.isString(e.phpClassPrefix))return"phpClassPrefix: string expected";if(null!=e.phpNamespace&&e.hasOwnProperty("phpNamespace")&&!i.isString(e.phpNamespace))return"phpNamespace: string expected";if(null!=e.phpMetadataNamespace&&e.hasOwnProperty("phpMetadataNamespace")&&!i.isString(e.phpMetadataNamespace))return"phpMetadataNamespace: string expected";if(null!=e.rubyPackage&&e.hasOwnProperty("rubyPackage")&&!i.isString(e.rubyPackage))return"rubyPackage: string expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.messageSetWireFormat=e.bool();break;case 2:o.noStandardDescriptorAccessor=e.bool();break;case 3:o.deprecated=e.bool();break;case 7:o.mapEntry=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(p.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},D.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},D.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.messageSetWireFormat&&e.hasOwnProperty("messageSetWireFormat")&&"boolean"!=typeof e.messageSetWireFormat)return"messageSetWireFormat: boolean expected";if(null!=e.noStandardDescriptorAccessor&&e.hasOwnProperty("noStandardDescriptorAccessor")&&"boolean"!=typeof e.noStandardDescriptorAccessor)return"noStandardDescriptorAccessor: boolean expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.mapEntry&&e.hasOwnProperty("mapEntry")&&"boolean"!=typeof e.mapEntry)return"mapEntry: boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.ctype=e.int32();break;case 2:o.packed=e.bool();break;case 6:o.jstype=e.int32();break;case 5:o.lazy=e.bool();break;case 3:o.deprecated=e.bool();break;case 10:o.weak=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(p.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},T.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},T.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.ctype&&e.hasOwnProperty("ctype"))switch(e.ctype){default:return"ctype: enum value expected";case 0:case 1:case 2:}if(null!=e.packed&&e.hasOwnProperty("packed")&&"boolean"!=typeof e.packed)return"packed: boolean expected";if(null!=e.jstype&&e.hasOwnProperty("jstype"))switch(e.jstype){default:return"jstype: enum value expected";case 0:case 1:case 2:}if(null!=e.lazy&&e.hasOwnProperty("lazy")&&"boolean"!=typeof e.lazy)return"lazy: boolean expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.weak&&e.hasOwnProperty("weak")&&"boolean"!=typeof e.weak)return"weak: 
boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3==999?(o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(p.google.protobuf.UninterpretedOption.decode(e,e.uint32()))):e.skipType(7&r)}return o},H.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},H.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 2:o.allowAlias=e.bool();break;case 3:o.deprecated=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(p.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},E.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},E.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.allowAlias&&e.hasOwnProperty("allowAlias")&&"boolean"!=typeof e.allowAlias)return"allowAlias: boolean expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.deprecated=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(p.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},z.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},z.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 33:o.deprecated=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(p.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;case 1049:o[".google.api.defaultHost"]=e.string();break;case 1050:o[".google.api.oauthScopes"]=e.string();break;default:e.skipType(7&r)}}return o},A.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},A.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 33:o.deprecated=e.bool();break;case 34:o.idempotencyLevel=e.int32();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(p.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;case 1049:o[".google.longrunning.operationInfo"]=p.google.longrunning.OperationInfo.decode(e,e.uint32());break;case 
72295728:o[".google.api.http"]=p.google.api.HttpRule.decode(e,e.uint32());break;case 1051:o[".google.api.methodSignature"]&&o[".google.api.methodSignature"].length||(o[".google.api.methodSignature"]=[]),o[".google.api.methodSignature"].push(e.string());break;default:e.skipType(7&r)}}return o},N.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},N.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.idempotencyLevel&&e.hasOwnProperty("idempotencyLevel"))switch(e.idempotencyLevel){default:return"idempotencyLevel: enum value expected";case 0:case 1:case 2:}if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 2:o.name&&o.name.length||(o.name=[]),o.name.push(p.google.protobuf.UninterpretedOption.NamePart.decode(e,e.uint32()));break;case 3:o.identifierValue=e.string();break;case 4:o.positiveIntValue=e.uint64();break;case 5:o.negativeIntValue=e.int64();break;case 6:o.doubleValue=e.double();break;case 7:o.stringValue=e.bytes();break;case 8:o.aggregateValue=e.string();break;default:e.skipType(7&r)}}return o},I.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},I.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")){if(!Array.isArray(e.name))return"name: array expected";for(var t=0;t>>0,e.positiveIntValue.high>>>0).toNumber(!0))),null!=e.negativeIntValue&&(i.Long?(t.negativeIntValue=i.Long.fromValue(e.negativeIntValue)).unsigned=!1:"string"==typeof e.negativeIntValue?t.negativeIntValue=parseInt(e.negativeIntValue,10):"number"==typeof e.negativeIntValue?t.negativeIntValue=e.negativeIntValue:"object"==typeof e.negativeIntValue&&(t.negativeIntValue=new i.LongBits(e.negativeIntValue.low>>>0,e.negativeIntValue.high>>>0).toNumber())),null!=e.doubleValue&&(t.doubleValue=Number(e.doubleValue)),null!=e.stringValue&&("string"==typeof e.stringValue?i.base64.decode(e.stringValue,t.stringValue=i.newBuffer(i.base64.length(e.stringValue)),0):e.stringValue.length&&(t.stringValue=e.stringValue)),null!=e.aggregateValue&&(t.aggregateValue=String(e.aggregateValue)),t},I.toObject=function(e,t){var n,o={};if(((t=t||{}).arrays||t.defaults)&&(o.name=[]),t.defaults&&(o.identifierValue="",i.Long?(n=new i.Long(0,0,!0),o.positiveIntValue=t.longs===String?n.toString():t.longs===Number?n.toNumber():n):o.positiveIntValue=t.longs===String?"0":0,i.Long?(n=new i.Long(0,0,!1),o.negativeIntValue=t.longs===String?n.toString():t.longs===Number?n.toNumber():n):o.negativeIntValue=t.longs===String?"0":0,o.doubleValue=0,t.bytes===String?o.stringValue="":(o.stringValue=[],t.bytes!==Array&&(o.stringValue=i.newBuffer(o.stringValue))),o.aggregateValue=""),e.name&&e.name.length){o.name=[];for(var r=0;r>>0,e.positiveIntValue.high>>>0).toNumber(!0):e.positiveIntValue),null!=e.negativeIntValue&&e.hasOwnProperty("negativeIntValue")&&("number"==typeof e.negativeIntValue?o.negativeIntValue=t.longs===String?String(e.negativeIntValue):e.negativeIntValue:o.negativeIntValue=t.longs===String?i.Long.prototype.toString.call(e.negativeIntValue):t.longs===Number?new 
i.LongBits(e.negativeIntValue.low>>>0,e.negativeIntValue.high>>>0).toNumber():e.negativeIntValue),null!=e.doubleValue&&e.hasOwnProperty("doubleValue")&&(o.doubleValue=t.json&&!isFinite(e.doubleValue)?String(e.doubleValue):e.doubleValue),null!=e.stringValue&&e.hasOwnProperty("stringValue")&&(o.stringValue=t.bytes===String?i.base64.encode(e.stringValue,0,e.stringValue.length):t.bytes===Array?Array.prototype.slice.call(e.stringValue):e.stringValue),null!=e.aggregateValue&&e.hasOwnProperty("aggregateValue")&&(o.aggregateValue=e.aggregateValue),o},I.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},I.NamePart=(q.prototype.namePart="",q.prototype.isExtension=!1,q.create=function(e){return new q(e)},q.encode=function(e,t){return(t=t||r.create()).uint32(10).string(e.namePart),t.uint32(16).bool(e.isExtension),t},q.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},q.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new p.google.protobuf.UninterpretedOption.NamePart;e.pos>>3){case 1:o.namePart=e.string();break;case 2:o.isExtension=e.bool();break;default:e.skipType(7&r)}}if(!o.hasOwnProperty("namePart"))throw i.ProtocolError("missing required 'namePart'",{instance:o});if(o.hasOwnProperty("isExtension"))return o;throw i.ProtocolError("missing required 'isExtension'",{instance:o})},q.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},q.verify=function(e){return"object"!=typeof e||null===e?"object expected":i.isString(e.namePart)?"boolean"!=typeof e.isExtension?"isExtension: boolean expected":null:"namePart: string expected"},q.fromObject=function(e){var t;return e instanceof p.google.protobuf.UninterpretedOption.NamePart?e:(t=new p.google.protobuf.UninterpretedOption.NamePart,null!=e.namePart&&(t.namePart=String(e.namePart)),null!=e.isExtension&&(t.isExtension=Boolean(e.isExtension)),t)},q.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.namePart="",n.isExtension=!1),null!=e.namePart&&e.hasOwnProperty("namePart")&&(n.namePart=e.namePart),null!=e.isExtension&&e.hasOwnProperty("isExtension")&&(n.isExtension=e.isExtension),n},q.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},q),I),t.SourceCodeInfo=(Y.prototype.location=i.emptyArray,Y.create=function(e){return new Y(e)},Y.encode=function(e,t){if(t=t||r.create(),null!=e.location&&e.location.length)for(var n=0;n>>3==1?(o.location&&o.location.length||(o.location=[]),o.location.push(p.google.protobuf.SourceCodeInfo.Location.decode(e,e.uint32()))):e.skipType(7&r)}return o},Y.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},Y.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.location&&e.hasOwnProperty("location")){if(!Array.isArray(e.location))return"location: array expected";for(var t=0;t>>3){case 1:if(o.path&&o.path.length||(o.path=[]),2==(7&r))for(var i=e.uint32()+e.pos;e.pos>>3==1?(o.annotation&&o.annotation.length||(o.annotation=[]),o.annotation.push(p.google.protobuf.GeneratedCodeInfo.Annotation.decode(e,e.uint32()))):e.skipType(7&r)}return o},W.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},W.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.annotation&&e.hasOwnProperty("annotation")){if(!Array.isArray(e.annotation))return"annotation: array expected";for(var t=0;t>>3){case 
1:if(o.path&&o.path.length||(o.path=[]),2==(7&r))for(var i=e.uint32()+e.pos;e.pos>>3){case 1:o.type_url=e.string();break;case 2:o.value=e.bytes();break;default:e.skipType(7&r)}}return o},X.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},X.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.type_url&&e.hasOwnProperty("type_url")&&!i.isString(e.type_url)?"type_url: string expected":null!=e.value&&e.hasOwnProperty("value")&&!(e.value&&"number"==typeof e.value.length||i.isString(e.value))?"value: buffer expected":null},X.fromObject=function(e){var t;return e instanceof p.google.protobuf.Any?e:(t=new p.google.protobuf.Any,null!=e.type_url&&(t.type_url=String(e.type_url)),null!=e.value&&("string"==typeof e.value?i.base64.decode(e.value,t.value=i.newBuffer(i.base64.length(e.value)),0):e.value.length&&(t.value=e.value)),t)},X.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.type_url="",t.bytes===String?n.value="":(n.value=[],t.bytes!==Array&&(n.value=i.newBuffer(n.value)))),null!=e.type_url&&e.hasOwnProperty("type_url")&&(n.type_url=e.type_url),null!=e.value&&e.hasOwnProperty("value")&&(n.value=t.bytes===String?i.base64.encode(e.value,0,e.value.length):t.bytes===Array?Array.prototype.slice.call(e.value):e.value),n},X.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},X),t.Duration=(K.prototype.seconds=i.Long?i.Long.fromBits(0,0,!1):0,K.prototype.nanos=0,K.create=function(e){return new K(e)},K.encode=function(e,t){return t=t||r.create(),null!=e.seconds&&Object.hasOwnProperty.call(e,"seconds")&&t.uint32(8).int64(e.seconds),null!=e.nanos&&Object.hasOwnProperty.call(e,"nanos")&&t.uint32(16).int32(e.nanos),t},K.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},K.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new p.google.protobuf.Duration;e.pos>>3){case 1:o.seconds=e.int64();break;case 2:o.nanos=e.int32();break;default:e.skipType(7&r)}}return o},K.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},K.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.seconds&&e.hasOwnProperty("seconds")&&!(i.isInteger(e.seconds)||e.seconds&&i.isInteger(e.seconds.low)&&i.isInteger(e.seconds.high))?"seconds: integer|Long expected":null!=e.nanos&&e.hasOwnProperty("nanos")&&!i.isInteger(e.nanos)?"nanos: integer expected":null},K.fromObject=function(e){var t;return e instanceof p.google.protobuf.Duration?e:(t=new p.google.protobuf.Duration,null!=e.seconds&&(i.Long?(t.seconds=i.Long.fromValue(e.seconds)).unsigned=!1:"string"==typeof e.seconds?t.seconds=parseInt(e.seconds,10):"number"==typeof e.seconds?t.seconds=e.seconds:"object"==typeof e.seconds&&(t.seconds=new i.LongBits(e.seconds.low>>>0,e.seconds.high>>>0).toNumber())),null!=e.nanos&&(t.nanos=0|e.nanos),t)},K.toObject=function(e,t){var n,o={};return(t=t||{}).defaults&&(i.Long?(n=new i.Long(0,0,!1),o.seconds=t.longs===String?n.toString():t.longs===Number?n.toNumber():n):o.seconds=t.longs===String?"0":0,o.nanos=0),null!=e.seconds&&e.hasOwnProperty("seconds")&&("number"==typeof e.seconds?o.seconds=t.longs===String?String(e.seconds):e.seconds:o.seconds=t.longs===String?i.Long.prototype.toString.call(e.seconds):t.longs===Number?new i.LongBits(e.seconds.low>>>0,e.seconds.high>>>0).toNumber():e.seconds),null!=e.nanos&&e.hasOwnProperty("nanos")&&(o.nanos=e.nanos),o},K.prototype.toJSON=function(){return 
this.constructor.toObject(this,o.util.toJSONOptions)},K),t.Empty=(Q.create=function(e){return new Q(e)},Q.encode=function(e,t){return t=t||r.create()},Q.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},Q.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,t=new p.google.protobuf.Empty;e.pos>>3){case 1:o.code=e.int32();break;case 2:o.message=e.string();break;case 3:o.details&&o.details.length||(o.details=[]),o.details.push(p.google.protobuf.Any.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},V.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},V.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.code&&e.hasOwnProperty("code")&&!i.isInteger(e.code))return"code: integer expected";if(null!=e.message&&e.hasOwnProperty("message")&&!i.isString(e.message))return"message: string expected";if(null!=e.details&&e.hasOwnProperty("details")){if(!Array.isArray(e.details))return"details: array expected";for(var t=0;t { +/***/ 55398: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.createAPICaller = createAPICaller; +const normalApiCaller_1 = __nccwpck_require__(56513); +function createAPICaller(settings, descriptor) { + if (!descriptor) { + return new normalApiCaller_1.NormalApiCaller(); + } + return descriptor.getApiCaller(settings); +} +//# sourceMappingURL=apiCaller.js.map /***/ }), -/***/ 28006: +/***/ 41283: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(197), exports); -tslib_1.__exportStar(__nccwpck_require__(7545), exports); -tslib_1.__exportStar(__nccwpck_require__(49123), exports); -tslib_1.__exportStar(__nccwpck_require__(84476), exports); - +exports.BundleApiCaller = void 0; +const call_1 = __nccwpck_require__(53047); +const googleError_1 = __nccwpck_require__(6634); +/** + * An implementation of APICaller for bundled calls. + * Uses BundleExecutor to do bundling. 
+ */ +class BundleApiCaller { + constructor(bundler) { + this.bundler = bundler; + } + init(callback) { + if (callback) { + return new call_1.OngoingCall(callback); + } + return new call_1.OngoingCallPromise(); + } + wrap(func) { + return func; + } + call(apiCall, argument, settings, status) { + if (!settings.isBundling) { + throw new googleError_1.GoogleError('Bundling enabled with no isBundling!'); + } + status.call((argument, callback) => { + this.bundler.schedule(apiCall, argument, callback); + return status; + }, argument); + } + fail(canceller, err) { + canceller.callback(err); + } + result(canceller) { + return canceller.promise; + } +} +exports.BundleApiCaller = BundleApiCaller; +//# sourceMappingURL=bundleApiCaller.js.map /***/ }), -/***/ 84476: -/***/ ((__unused_webpack_module, exports) => { +/***/ 94847: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.BundleDescriptor = void 0; +const normalApiCaller_1 = __nccwpck_require__(56513); +const bundleApiCaller_1 = __nccwpck_require__(41283); +const bundleExecutor_1 = __nccwpck_require__(59491); +const util_1 = __nccwpck_require__(26969); +/** + * A descriptor for calls that can be bundled into one call. + */ +class BundleDescriptor { + /** + * Describes the structure of bundled call. + * + * requestDiscriminatorFields may include '.' as a separator, which is used to + * indicate object traversal. This allows fields in nested objects to be used + * to determine what request to bundle. + * + * @property {String} bundledField + * @property {String} requestDiscriminatorFields + * @property {String} subresponseField + * @property {Function} byteLengthFunction + * + * @param {String} bundledField - the repeated field in the request message + * that will have its elements aggregated by bundling. + * @param {String} requestDiscriminatorFields - a list of fields in the + * target request message class that are used to detemrine which request + * messages should be bundled together. + * @param {String} subresponseField - an optional field, when present it + * indicates the field in the response message that should be used to + * demultiplex the response into multiple response messages. + * @param {Function} byteLengthFunction - a function to obtain the byte + * length to be consumed for the bundled field messages. Because Node.JS + * protobuf.js/gRPC uses builtin Objects for the user-visible data and + * internally they are encoded/decoded in protobuf manner, this function + * is actually necessary to calculate the byte length. 
+ * @constructor + */ + constructor(bundledField, requestDiscriminatorFields, subresponseField, byteLengthFunction) { + if (!byteLengthFunction && typeof subresponseField === 'function') { + byteLengthFunction = subresponseField; + subresponseField = null; + } + this.bundledField = bundledField; + this.requestDiscriminatorFields = + requestDiscriminatorFields.map(util_1.toCamelCase); + this.subresponseField = subresponseField; + this.byteLengthFunction = byteLengthFunction; + } + getApiCaller(settings) { + if (settings.isBundling === false) { + return new normalApiCaller_1.NormalApiCaller(); + } + return new bundleApiCaller_1.BundleApiCaller(new bundleExecutor_1.BundleExecutor(settings.bundleOptions, this)); + } +} +exports.BundleDescriptor = BundleDescriptor; +//# sourceMappingURL=bundleDescriptor.js.map /***/ }), -/***/ 55756: +/***/ 59491: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(74075), exports); -tslib_1.__exportStar(__nccwpck_require__(11239), exports); -tslib_1.__exportStar(__nccwpck_require__(63274), exports); -tslib_1.__exportStar(__nccwpck_require__(78340), exports); -tslib_1.__exportStar(__nccwpck_require__(4744), exports); -tslib_1.__exportStar(__nccwpck_require__(68270), exports); -tslib_1.__exportStar(__nccwpck_require__(57628), exports); -tslib_1.__exportStar(__nccwpck_require__(89035), exports); -tslib_1.__exportStar(__nccwpck_require__(7225), exports); -tslib_1.__exportStar(__nccwpck_require__(54126), exports); -tslib_1.__exportStar(__nccwpck_require__(21550), exports); -tslib_1.__exportStar(__nccwpck_require__(88508), exports); -tslib_1.__exportStar(__nccwpck_require__(47447), exports); -tslib_1.__exportStar(__nccwpck_require__(18883), exports); -tslib_1.__exportStar(__nccwpck_require__(12842), exports); -tslib_1.__exportStar(__nccwpck_require__(28006), exports); -tslib_1.__exportStar(__nccwpck_require__(52866), exports); -tslib_1.__exportStar(__nccwpck_require__(17756), exports); -tslib_1.__exportStar(__nccwpck_require__(45489), exports); -tslib_1.__exportStar(__nccwpck_require__(26524), exports); -tslib_1.__exportStar(__nccwpck_require__(14603), exports); -tslib_1.__exportStar(__nccwpck_require__(83752), exports); -tslib_1.__exportStar(__nccwpck_require__(30774), exports); -tslib_1.__exportStar(__nccwpck_require__(14089), exports); -tslib_1.__exportStar(__nccwpck_require__(45678), exports); -tslib_1.__exportStar(__nccwpck_require__(69926), exports); -tslib_1.__exportStar(__nccwpck_require__(9945), exports); -tslib_1.__exportStar(__nccwpck_require__(28564), exports); -tslib_1.__exportStar(__nccwpck_require__(61285), exports); -tslib_1.__exportStar(__nccwpck_require__(50364), exports); -tslib_1.__exportStar(__nccwpck_require__(69304), exports); 
-tslib_1.__exportStar(__nccwpck_require__(46098), exports); -tslib_1.__exportStar(__nccwpck_require__(10375), exports); -tslib_1.__exportStar(__nccwpck_require__(66894), exports); -tslib_1.__exportStar(__nccwpck_require__(57887), exports); -tslib_1.__exportStar(__nccwpck_require__(66255), exports); - +exports.BundleExecutor = void 0; +const status_1 = __nccwpck_require__(53501); +const googleError_1 = __nccwpck_require__(6634); +const warnings_1 = __nccwpck_require__(16328); +const bundlingUtils_1 = __nccwpck_require__(16255); +const task_1 = __nccwpck_require__(43302); +function noop() { } +/** + * BundleExecutor stores several timers for each bundle (calls are bundled based + * on the options passed, each bundle has unique ID that is calculated based on + * field values). Each timer fires and sends a call after certain amount of + * time, and if a new request comes to the same bundle, the timer can be + * restarted. + */ +class BundleExecutor { + /** + * Organizes requests for an api service that requires to bundle them. + * + * @param {BundleOptions} bundleOptions - configures strategy this instance + * uses when executing bundled functions. + * @param {BundleDescriptor} bundleDescriptor - the description of the bundling. + * @constructor + */ + constructor(bundleOptions, bundleDescriptor) { + this._options = bundleOptions; + this._descriptor = bundleDescriptor; + this._tasks = {}; + this._timers = {}; + this._invocations = {}; + this._invocationId = 0; + } + /** + * Schedule a method call. + * + * @param {function} apiCall - the function for an API call. + * @param {Object} request - the request object to be bundled with others. + * @param {APICallback} callback - the callback to be called when the method finished. + * @return {function()} - the function to cancel the scheduled invocation. + */ + schedule(apiCall, request, callback) { + const bundleId = (0, bundlingUtils_1.computeBundleId)(request, this._descriptor.requestDiscriminatorFields); + callback = (callback || noop); + if (bundleId === undefined) { + (0, warnings_1.warn)('bundling_schedule_bundleid_undefined', 'The request does not have enough information for request bundling. ' + + `Invoking immediately. Request: ${JSON.stringify(request)} ` + + `discriminator fields: ${this._descriptor.requestDiscriminatorFields}`); + return apiCall(request, callback); + } + if (request[this._descriptor.bundledField] === undefined) { + (0, warnings_1.warn)('bundling_no_bundled_field', `Request does not contain field ${this._descriptor.bundledField} that must present for bundling. ` + + `Invoking immediately. 
Request: ${JSON.stringify(request)}`); + return apiCall(request, callback); + } + if (!(bundleId in this._tasks)) { + this._tasks[bundleId] = new task_1.Task(apiCall, request, this._descriptor.bundledField, this._descriptor.subresponseField); + } + let task = this._tasks[bundleId]; + callback.id = String(this._invocationId++); + this._invocations[callback.id] = bundleId; + const bundledField = request[this._descriptor.bundledField]; + const elementCount = bundledField.length; + let requestBytes = 0; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + bundledField.forEach(obj => { + requestBytes += this._descriptor.byteLengthFunction(obj); + }); + const countLimit = this._options.elementCountLimit || 0; + const byteLimit = this._options.requestByteLimit || 0; + if ((countLimit > 0 && elementCount > countLimit) || + (byteLimit > 0 && requestBytes >= byteLimit)) { + let message; + if (countLimit > 0 && elementCount > countLimit) { + message = + 'The number of elements ' + + elementCount + + ' exceeds the limit ' + + this._options.elementCountLimit; + } + else { + message = + 'The required bytes ' + + requestBytes + + ' exceeds the limit ' + + this._options.requestByteLimit; + } + const error = new googleError_1.GoogleError(message); + error.code = status_1.Status.INVALID_ARGUMENT; + callback(error); + return { + cancel: noop, + }; + } + const existingCount = task.getElementCount(); + const existingBytes = task.getRequestByteSize(); + if ((countLimit > 0 && elementCount + existingCount >= countLimit) || + (byteLimit > 0 && requestBytes + existingBytes >= byteLimit)) { + this._runNow(bundleId); + this._tasks[bundleId] = new task_1.Task(apiCall, request, this._descriptor.bundledField, this._descriptor.subresponseField); + task = this._tasks[bundleId]; + } + task.extend(bundledField, requestBytes, callback); + const ret = { + cancel() { + self._cancel(callback.id); + }, + }; + const countThreshold = this._options.elementCountThreshold || 0; + const sizeThreshold = this._options.requestByteThreshold || 0; + if ((countThreshold > 0 && task.getElementCount() >= countThreshold) || + (sizeThreshold > 0 && task.getRequestByteSize() >= sizeThreshold)) { + this._runNow(bundleId); + return ret; + } + if (!(bundleId in this._timers) && this._options.delayThreshold > 0) { + this._timers[bundleId] = setTimeout(() => { + delete this._timers[bundleId]; + this._runNow(bundleId); + }, this._options.delayThreshold); + } + return ret; + } + /** + * Clears scheduled timeout if it exists. + * + * @param {String} bundleId - the id for the task whose timeout needs to be + * cleared. + * @private + */ + _maybeClearTimeout(bundleId) { + if (bundleId in this._timers) { + const timerId = this._timers[bundleId]; + delete this._timers[bundleId]; + clearTimeout(timerId); + } + } + /** + * Cancels an event. + * + * @param {String} id - The id for the event in the task. + * @private + */ + _cancel(id) { + if (!(id in this._invocations)) { + return; + } + const bundleId = this._invocations[id]; + if (!(bundleId in this._tasks)) { + return; + } + const task = this._tasks[bundleId]; + delete this._invocations[id]; + if (task.cancel(id)) { + this._maybeClearTimeout(bundleId); + delete this._tasks[bundleId]; + } + } + /** + * Invokes a task. + * + * @param {String} bundleId - The id for the task. 
+ * @private + */ + _runNow(bundleId) { + if (!(bundleId in this._tasks)) { + (0, warnings_1.warn)('bundle_runnow_bundleid_unknown', `No such bundleid: ${bundleId}`); + return; + } + this._maybeClearTimeout(bundleId); + const task = this._tasks[bundleId]; + delete this._tasks[bundleId]; + task.run().forEach(id => { + delete this._invocations[id]; + }); + } +} +exports.BundleExecutor = BundleExecutor; +//# sourceMappingURL=bundleExecutor.js.map /***/ }), -/***/ 52866: +/***/ 16255: /***/ ((__unused_webpack_module, exports) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.computeBundleId = computeBundleId; +/** + * Compute the identifier of the `obj`. The objects of the same ID + * will be bundled together. + * + * @param {RequestType} obj - The request object. + * @param {String[]} discriminatorFields - The array of field names. + * A field name may include '.' as a separator, which is used to + * indicate object traversal. + * @return {String|undefined} - the identifier string, or undefined if any + * discriminator fields do not exist. + */ +function computeBundleId(obj, discriminatorFields) { + const ids = []; + let hasIds = false; + for (const field of discriminatorFields) { + const id = at(obj, field); + if (id === undefined) { + ids.push(null); + } + else { + hasIds = true; + ids.push(id); + } + } + if (!hasIds) { + return undefined; + } + return JSON.stringify(ids); +} +/** + * Given an object field path that may contain dots, dig into the obj and find + * the value at the given path. + * @example + * const obj = { + * a: { + * b: 5 + * } + * } + * const id = at(obj, 'a.b'); + * // id = 5 + * @param field Path to the property with `.` notation + * @param obj The object to traverse + * @returns the value at the given path + */ +function at(obj, field) { + const pathParts = field.split('.'); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let currentObj = obj; + for (const pathPart of pathParts) { + currentObj = currentObj === null || currentObj === void 0 ? void 0 : currentObj[pathPart]; + } + return currentObj; +} +//# sourceMappingURL=bundlingUtils.js.map /***/ }), -/***/ 17756: -/***/ ((__unused_webpack_module, exports) => { +/***/ 43302: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SMITHY_CONTEXT_KEY = void 0; -exports.SMITHY_CONTEXT_KEY = "__smithy_context"; - +exports.Task = void 0; +exports.deepCopyForResponse = deepCopyForResponse; +const status_1 = __nccwpck_require__(53501); +const googleError_1 = __nccwpck_require__(6634); +/** + * Creates a deep copy of the object with the consideration of subresponse + * fields for bundling. + * + * @param {Object} obj - The source object. + * @param {Object?} subresponseInfo - The information to copy the subset of + * the field for the response. Do nothing if it's null. + * @param {String} subresponseInfo.field - The field name. + * @param {number} subresponseInfo.start - The offset where the copying + * element should starts with. + * @param {number} subresponseInfo.end - The ending index where the copying + * region of the elements ends. + * @return {Object} The copied object. + * @private + */ +function deepCopyForResponse( +// eslint-disable-next-line @typescript-eslint/no-explicit-any +obj, subresponseInfo) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let result; + if (obj === null) { + return null; + } + if (obj === undefined) { + return undefined; + } + if (Array.isArray(obj)) { + result = []; + obj.forEach(element => { + result.push(deepCopyForResponse(element, null)); + }); + return result; + } + // Some objects (such as ByteBuffer) have copy method. + if (obj.copy !== undefined) { + return obj.copy(); + } + // ArrayBuffer should be copied through slice(). + if (obj instanceof ArrayBuffer) { + return obj.slice(0); + } + if (typeof obj === 'object') { + result = {}; + Object.keys(obj).forEach(key => { + if (subresponseInfo && + key === subresponseInfo.field && + Array.isArray(obj[key])) { + // Note that subresponses are not deep-copied. This is safe because + // those subresponses are not shared among callbacks. + result[key] = obj[key].slice(subresponseInfo.start, subresponseInfo.end); + } + else { + result[key] = deepCopyForResponse(obj[key], null); + } + }); + return result; + } + return obj; +} +class Task { + /** + * A task coordinates the execution of a single bundle. + * + * @param {function} apiCall - The function to conduct calling API. + * @param {Object} bundlingRequest - The base request object to be used + * for the actual API call. + * @param {string} bundledField - The name of the field in bundlingRequest + * to be bundled. + * @param {string=} subresponseField - The name of the field in the response + * to be passed to the callback. + * @constructor + * @private + */ + constructor(apiCall, bundlingRequest, bundledField, subresponseField) { + this._apiCall = apiCall; + this._request = bundlingRequest; + this._bundledField = bundledField; + this._subresponseField = subresponseField; + this._data = []; + } + /** + * Returns the number of elements in a task. + * @return {number} The number of elements. + */ + getElementCount() { + let count = 0; + for (let i = 0; i < this._data.length; ++i) { + count += this._data[i].elements.length; + } + return count; + } + /** + * Returns the total byte size of the elements in a task. + * @return {number} The byte size. + */ + getRequestByteSize() { + let size = 0; + for (let i = 0; i < this._data.length; ++i) { + size += this._data[i].bytes; + } + return size; + } + /** + * Invokes the actual API call with current elements. + * @return {string[]} - the list of ids for invocations to be run. 
+ */ + run() { + if (this._data.length === 0) { + return []; + } + const request = this._request; + const elements = []; + const ids = []; + for (let i = 0; i < this._data.length; ++i) { + elements.push(...this._data[i].elements); + ids.push(this._data[i].callback.id); + } + request[this._bundledField] = elements; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + this.callCanceller = this._apiCall(request, (err, response) => { + const responses = []; + if (err) { + self._data.forEach(() => { + responses.push(undefined); + }); + } + else { + let subresponseInfo = null; + if (self._subresponseField) { + subresponseInfo = { + field: self._subresponseField, + start: 0, + }; + } + self._data.forEach(data => { + if (subresponseInfo) { + subresponseInfo.end = + subresponseInfo.start + data.elements.length; + } + responses.push(deepCopyForResponse(response, subresponseInfo)); + if (subresponseInfo) { + subresponseInfo.start = subresponseInfo.end; + } + }); + } + for (let i = 0; i < self._data.length; ++i) { + if (self._data[i].cancelled) { + const error = new googleError_1.GoogleError('cancelled'); + error.code = status_1.Status.CANCELLED; + self._data[i].callback(error); + } + else { + self._data[i].callback(err, responses[i]); + } + } + }); + return ids; + } + /** + * Appends the list of elements into the task. + * @param {Object[]} elements - the new list of elements. + * @param {number} bytes - the byte size required to encode elements in the API. + * @param {APICallback} callback - the callback of the method call. + */ + extend(elements, bytes, callback) { + this._data.push({ + elements, + bytes, + callback, + }); + } + /** + * Cancels a part of elements. + * @param {string} id - The identifier of the part of elements. + * @return {boolean} Whether the entire task will be canceled or not. + */ + cancel(id) { + if (this.callCanceller) { + let allCancelled = true; + this._data.forEach(d => { + if (d.callback.id === id) { + d.cancelled = true; + } + if (!d.cancelled) { + allCancelled = false; + } + }); + if (allCancelled) { + this.callCanceller.cancel(); + } + return allCancelled; + } + for (let i = 0; i < this._data.length; ++i) { + if (this._data[i].callback.id === id) { + const error = new googleError_1.GoogleError('cancelled'); + error.code = status_1.Status.CANCELLED; + this._data[i].callback(error); + this._data.splice(i, 1); + break; + } + } + return this._data.length === 0; + } +} +exports.Task = Task; +//# sourceMappingURL=task.js.map /***/ }), -/***/ 45489: -/***/ ((__unused_webpack_module, exports) => { +/***/ 53047: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.OngoingCallPromise = exports.OngoingCall = void 0; +const status_1 = __nccwpck_require__(53501); +const googleError_1 = __nccwpck_require__(6634); +class OngoingCall { + /** + * OngoingCall manages callback, API calls, and cancellation + * of the API calls. + * @param {APICallback=} callback + * The callback to be called asynchronously when the API call + * finishes. + * @constructor + * @property {APICallback} callback + * The callback function to be called. + * @private + */ + constructor(callback) { + this.callback = callback; + this.completed = false; + } + /** + * Cancels the ongoing promise. + */ + cancel() { + if (this.completed) { + return; + } + this.completed = true; + if (this.cancelFunc) { + this.cancelFunc(); + } + else { + const error = new googleError_1.GoogleError('cancelled'); + error.code = status_1.Status.CANCELLED; + this.callback(error); + } + } + /** + * Call calls the specified function. Result will be used to fulfill + * the promise. + * + * @param {SimpleCallbackFunction} func + * A function for an API call. + * @param {Object} argument + * A request object. + */ + call(func, argument) { + if (this.completed) { + return; + } + const canceller = func(argument, (err, response, next, rawResponse) => { + this.completed = true; + setImmediate(this.callback, err, response, next, rawResponse); + }); + if (canceller instanceof Promise) { + canceller.catch(err => { + setImmediate(this.callback, new googleError_1.GoogleError(err), null, null, null); + }); + } + this.cancelFunc = () => canceller.cancel(); + } +} +exports.OngoingCall = OngoingCall; +class OngoingCallPromise extends OngoingCall { + /** + * GaxPromise is GRPCCallbackWrapper, but it holds a promise when + * the API call finishes. + * @constructor + * @private + */ + constructor() { + let resolveCallback; + let rejectCallback; + const callback = (err, response, next, rawResponse) => { + if (err) { + // If gRPC metadata exist, parsed google.rpc.status details. + if (err.metadata) { + rejectCallback(googleError_1.GoogleError.parseGRPCStatusDetails(err)); + } + else { + rejectCallback(err); + } + } + else if (response !== undefined) { + resolveCallback([response, next || null, rawResponse || null]); + } + else { + throw new googleError_1.GoogleError('Neither error nor response are defined'); + } + }; + const promise = new Promise((resolve, reject) => { + resolveCallback = resolve; + rejectCallback = reject; + }); + super(callback); + this.promise = promise; + this.promise.cancel = () => { + this.cancel(); + }; + } +} +exports.OngoingCallPromise = OngoingCallPromise; +//# sourceMappingURL=call.js.map /***/ }), -/***/ 26524: -/***/ ((__unused_webpack_module, exports) => { +/***/ 79712: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IniSectionType = void 0; -var IniSectionType; -(function (IniSectionType) { - IniSectionType["PROFILE"] = "profile"; - IniSectionType["SSO_SESSION"] = "sso-session"; - IniSectionType["SERVICES"] = "services"; -})(IniSectionType = exports.IniSectionType || (exports.IniSectionType = {})); - +exports.createApiCall = createApiCall; +/** + * Provides function wrappers that implement page streaming and retrying. + */ +const apiCaller_1 = __nccwpck_require__(55398); +const gax_1 = __nccwpck_require__(80978); +const retries_1 = __nccwpck_require__(69742); +const timeout_1 = __nccwpck_require__(4531); +const streamingApiCaller_1 = __nccwpck_require__(32936); +const warnings_1 = __nccwpck_require__(16328); +/** + * Converts an rpc call into an API call governed by the settings. + * + * In typical usage, `func` will be a promise to a callable used to make an rpc + * request. This will mostly likely be a bound method from a request stub used + * to make an rpc call. It is not a direct function but a Promise instance, + * because of its asynchronism (typically, obtaining the auth information). + * + * The result is a function which manages the API call with the given settings + * and the options on the invocation. + * + * @param {Promise|GRPCCall} func - is either a promise to be used to make + * a bare RPC call, or just a bare RPC call. + * @param {CallSettings} settings - provides the settings for this call + * @param {Descriptor} descriptor - optionally specify the descriptor for + * the method call. + * @return {GaxCall} func - a bound method on a request stub used + * to make an rpc call. + */ +function createApiCall(func, settings, descriptor, +// eslint-disable-next-line @typescript-eslint/no-unused-vars +_fallback // unused here, used in fallback.ts implementation +) { + // we want to be able to accept both promise resolving to a function and a + // function. Currently client librares are only calling this method with a + // promise, but it will change. + const funcPromise = typeof func === 'function' ? Promise.resolve(func) : func; + // the following apiCaller will be used for all calls of this function... + const apiCaller = (0, apiCaller_1.createAPICaller)(settings, descriptor); + return (request, callOptions, callback) => { + var _a, _b; + let currentApiCaller = apiCaller; + let thisSettings; + if (currentApiCaller instanceof streamingApiCaller_1.StreamingApiCaller) { + const gaxStreamingRetries = (_b = (_a = currentApiCaller.descriptor) === null || _a === void 0 ? void 0 : _a.gaxStreamingRetries) !== null && _b !== void 0 ? _b : false; + // If Gax streaming retries are enabled, check settings passed at call time and convert parameters if needed + const convertedRetryOptions = (0, gax_1.convertRetryOptions)(callOptions, gaxStreamingRetries); + thisSettings = settings.merge(convertedRetryOptions); + } + else { + thisSettings = settings.merge(callOptions); + } + // special case: if bundling is disabled for this one call, + // use default API caller instead + if (settings.isBundling && !thisSettings.isBundling) { + currentApiCaller = (0, apiCaller_1.createAPICaller)(settings, undefined); + } + const ongoingCall = currentApiCaller.init(callback); + funcPromise + .then((func) => { + var _a, _b; + var _c; + // Initially, the function is just what gRPC server stub contains. + func = currentApiCaller.wrap(func); + const streaming = (_a = currentApiCaller.descriptor) === null || _a === void 0 ? 
void 0 : _a.streaming; + const retry = thisSettings.retry; + if (streaming && retry) { + if (retry.retryCodes.length > 0 && retry.shouldRetryFn) { + (0, warnings_1.warn)('either_retrycodes_or_shouldretryfn', 'Only one of retryCodes or shouldRetryFn may be defined. Ignoring retryCodes.'); + retry.retryCodes = []; + } + if (!currentApiCaller.descriptor + .gaxStreamingRetries && + retry.getResumptionRequestFn) { + throw new Error('getResumptionRequestFn can only be used when gaxStreamingRetries is set to true.'); + } + } + if (!streaming && retry) { + if (retry.shouldRetryFn) { + throw new Error('Using a function to determine retry eligibility is only supported with server streaming calls'); + } + if (retry.getResumptionRequestFn) { + throw new Error('Resumption strategy can only be used with server streaming retries'); + } + if (retry.retryCodes && retry.retryCodes.length > 0) { + (_b = (_c = retry.backoffSettings).initialRpcTimeoutMillis) !== null && _b !== void 0 ? _b : (_c.initialRpcTimeoutMillis = thisSettings.timeout); + return (0, retries_1.retryable)(func, thisSettings.retry, thisSettings.otherArgs, thisSettings.apiName); + } + } + return (0, timeout_1.addTimeoutArg)(func, thisSettings.timeout, thisSettings.otherArgs); + }) + .then((apiCall) => { + // After adding retries / timeouts, the call function becomes simpler: + // it only accepts request and callback. + currentApiCaller.call(apiCall, request, thisSettings, ongoingCall); + }) + .catch(err => { + currentApiCaller.fail(ongoingCall, err); + }); + // Calls normally return a "cancellable promise" that can be used to `await` for the actual result, + // or to cancel the ongoing call. + return currentApiCaller.result(ongoingCall); + }; +} +//# sourceMappingURL=createApiCall.js.map /***/ }), -/***/ 14603: -/***/ ((__unused_webpack_module, exports) => { +/***/ 13627: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.BundleDescriptor = exports.StreamDescriptor = exports.PageDescriptor = exports.LongrunningDescriptor = void 0; +var longRunningDescriptor_1 = __nccwpck_require__(8096); +Object.defineProperty(exports, "LongrunningDescriptor", ({ enumerable: true, get: function () { return longRunningDescriptor_1.LongRunningDescriptor; } })); +var pageDescriptor_1 = __nccwpck_require__(71172); +Object.defineProperty(exports, "PageDescriptor", ({ enumerable: true, get: function () { return pageDescriptor_1.PageDescriptor; } })); +var streamDescriptor_1 = __nccwpck_require__(65880); +Object.defineProperty(exports, "StreamDescriptor", ({ enumerable: true, get: function () { return streamDescriptor_1.StreamDescriptor; } })); +var bundleDescriptor_1 = __nccwpck_require__(94847); +Object.defineProperty(exports, "BundleDescriptor", ({ enumerable: true, get: function () { return bundleDescriptor_1.BundleDescriptor; } })); +//# sourceMappingURL=descriptor.js.map /***/ }), -/***/ 83752: -/***/ ((__unused_webpack_module, exports) => { +/***/ 90418: +/***/ ((module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fallback = exports.GoogleError = exports.operation = exports.Operation = exports.warn = exports.protobufMinimal = exports.protobuf = exports.LocationProtos = exports.IamProtos = exports.operationsProtos = exports.GrpcClient = exports.defaultToObjectOptions = exports.makeUUID = exports.LocationsClient = exports.IamClient = exports.OperationsClient = exports.StreamType = exports.StreamDescriptor = exports.PageDescriptor = exports.LongrunningDescriptor = exports.BundleDescriptor = exports.version = exports.createDefaultBackoffSettings = exports.RetryOptions = exports.constructSettings = exports.CallSettings = exports.routingHeader = exports.PathTemplate = void 0; +exports.lro = lro; +exports.createApiCall = createApiCall; +const objectHash = __nccwpck_require__(24856); +const protobuf = __nccwpck_require__(85881); +exports.protobuf = protobuf; +const gax = __nccwpck_require__(80978); +const routingHeader = __nccwpck_require__(8827); +exports.routingHeader = routingHeader; +const status_1 = __nccwpck_require__(53501); +const google_auth_library_1 = __nccwpck_require__(20810); +const operationsClient_1 = __nccwpck_require__(82450); +const createApiCall_1 = __nccwpck_require__(79712); +const fallbackRest = __nccwpck_require__(21088); +const featureDetection_1 = __nccwpck_require__(11154); +const fallbackServiceStub_1 = __nccwpck_require__(38255); +const streaming_1 = __nccwpck_require__(67389); +const util_1 = __nccwpck_require__(26969); +const IamProtos = __nccwpck_require__(83555); +exports.IamProtos = IamProtos; +const LocationProtos = __nccwpck_require__(22560); +exports.LocationProtos = LocationProtos; +const operationsProtos = __nccwpck_require__(59081); +exports.operationsProtos = operationsProtos; +var pathTemplate_1 = __nccwpck_require__(20513); +Object.defineProperty(exports, "PathTemplate", ({ enumerable: true, get: function () { return pathTemplate_1.PathTemplate; } })); +var gax_1 = __nccwpck_require__(80978); +Object.defineProperty(exports, "CallSettings", ({ enumerable: true, get: function () { return gax_1.CallSettings; } })); +Object.defineProperty(exports, "constructSettings", ({ enumerable: true, get: function () { return gax_1.constructSettings; } })); +Object.defineProperty(exports, "RetryOptions", ({ enumerable: true, get: function () { return gax_1.RetryOptions; } })); +Object.defineProperty(exports, "createDefaultBackoffSettings", ({ enumerable: true, get: function () { return gax_1.createDefaultBackoffSettings; } })); +exports.version = (__nccwpck_require__(13385).version) + '-fallback'; +var descriptor_1 = __nccwpck_require__(13627); +Object.defineProperty(exports, "BundleDescriptor", ({ enumerable: true, get: function () { return descriptor_1.BundleDescriptor; } })); +Object.defineProperty(exports, "LongrunningDescriptor", ({ enumerable: true, get: function () { return descriptor_1.LongrunningDescriptor; } })); +Object.defineProperty(exports, "PageDescriptor", ({ enumerable: true, get: function () { return descriptor_1.PageDescriptor; } })); +Object.defineProperty(exports, "StreamDescriptor", ({ enumerable: true, get: function () { return descriptor_1.StreamDescriptor; } })); +var streaming_2 = __nccwpck_require__(67389); +Object.defineProperty(exports, "StreamType", ({ enumerable: true, get: function () { return streaming_2.StreamType; } })); +var operationsClient_2 = __nccwpck_require__(82450); +Object.defineProperty(exports, "OperationsClient", ({ enumerable: true, get: function () { 
return operationsClient_2.OperationsClient; } })); +var iamService_1 = __nccwpck_require__(28118); +Object.defineProperty(exports, "IamClient", ({ enumerable: true, get: function () { return iamService_1.IamClient; } })); +var locationService_1 = __nccwpck_require__(58591); +Object.defineProperty(exports, "LocationsClient", ({ enumerable: true, get: function () { return locationService_1.LocationsClient; } })); +var util_2 = __nccwpck_require__(26969); +Object.defineProperty(exports, "makeUUID", ({ enumerable: true, get: function () { return util_2.makeUUID; } })); +exports.defaultToObjectOptions = { + keepCase: false, + longs: String, + enums: String, + defaults: true, + oneofs: true, +}; +const CLIENT_VERSION_HEADER = 'x-goog-api-client'; +class GrpcClient { + /** + * In rare cases users might need to deallocate all memory consumed by loaded protos. + * This method will delete the proto cache content. + */ + static clearProtoCache() { + GrpcClient.protoCache.clear(); + } + /** + * gRPC-fallback version of GrpcClient + * Implements GrpcClient API for a browser using grpc-fallback protocol (sends serialized protobuf to HTTP/1 $rpc endpoint). + * + * @param {Object=} options.auth - An instance of OAuth2Client to use in browser, or an instance of GoogleAuth from google-auth-library + * to use in Node.js. Required for browser, optional for Node.js. + * @constructor + */ + constructor(options = {}) { + var _a; + if (!(0, featureDetection_1.isNodeJS)()) { + if (!options.auth) { + throw new Error(JSON.stringify(options) + + 'You need to pass auth instance to use gRPC-fallback client in browser or other non-Node.js environments. Use OAuth2Client from google-auth-library.'); + } + this.auth = options.auth; + } + else { + this.auth = + options.auth || + new google_auth_library_1.GoogleAuth(options); + } + this.fallback = options.fallback ? true : false; + this.grpcVersion = (__nccwpck_require__(13385).version); + this.httpRules = options.httpRules; + this.numericEnums = (_a = options.numericEnums) !== null && _a !== void 0 ? _a : false; + } + /** + * gRPC-fallback version of loadProto + * Loads the protobuf root object from a JSON object created from a proto file + * @param {Object} jsonObject - A JSON version of a protofile created usin protobuf.js + * @returns {Object} Root namespace of proto JSON + */ + loadProto(jsonObject) { + const rootObject = protobuf.Root.fromJSON(jsonObject); + return rootObject; + } + loadProtoJSON(json, ignoreCache = false) { + const hash = objectHash(JSON.stringify(json)).toString(); + const cached = GrpcClient.protoCache.get(hash); + if (cached && !ignoreCache) { + return cached; + } + const root = protobuf.Root.fromJSON(json); + GrpcClient.protoCache.set(hash, root); + return root; + } + static getServiceMethods(service) { + const methods = {}; + for (const [methodName, methodObject] of Object.entries(service.methods)) { + const methodNameLowerCamelCase = (0, util_1.toLowerCamelCase)(methodName); + methods[methodNameLowerCamelCase] = methodObject; + } + return methods; + } + /** + * gRPC-fallback version of constructSettings + * A wrapper of {@link constructSettings} function under the gRPC context. + * + * Most of parameters are common among constructSettings, please take a look. + * @param {string} serviceName - The fullly-qualified name of the service. + * @param {Object} clientConfig - A dictionary of the client config. + * @param {Object} configOverrides - A dictionary of overriding configs. 
+ * @param {Object} headers - A dictionary of additional HTTP header name to + * its value. + * @return {Object} A mapping of method names to CallSettings. + */ + constructSettings(serviceName, clientConfig, configOverrides, headers) { + function buildMetadata(abTests, moreHeaders) { + const metadata = {}; + if (!headers) { + headers = {}; + } + // Since gRPC expects each header to be an array, + // we are doing the same for fallback here. + for (const key in headers) { + metadata[key] = Array.isArray(headers[key]) + ? headers[key] + : [headers[key]]; + } + // gRPC-fallback request must have 'grpc-web/' in 'x-goog-api-client' + const clientVersions = []; + if (metadata[CLIENT_VERSION_HEADER] && + metadata[CLIENT_VERSION_HEADER][0]) { + clientVersions.push(...metadata[CLIENT_VERSION_HEADER][0].split(' ')); + } + clientVersions.push(`grpc-web/${exports.version}`); + metadata[CLIENT_VERSION_HEADER] = [clientVersions.join(' ')]; + if (!moreHeaders) { + return metadata; + } + for (const key in moreHeaders) { + if (key.toLowerCase() !== CLIENT_VERSION_HEADER) { + const value = moreHeaders[key]; + if (Array.isArray(value)) { + if (metadata[key] === undefined) { + metadata[key] = value; + } + else { + if (Array.isArray(metadata[key])) { + metadata[key].push(...value); + } + else { + throw new Error(`Can not add value ${value} to the call metadata.`); + } + } + } + else { + metadata[key] = [value]; + } + } + } + return metadata; + } + return gax.constructSettings(serviceName, clientConfig, configOverrides, status_1.Status, { metadataBuilder: buildMetadata }); + } + /** + * gRPC-fallback version of createStub + * Creates a gRPC-fallback stub with authentication headers built from supplied OAuth2Client instance + * + * @param {function} CreateStub - The constructor function of the stub. + * @param {Object} service - A protobufjs Service object (as returned by lookupService) + * @param {Object} opts - Connection options, as described below. + * @param {string} opts.servicePath - The hostname of the API endpoint service. + * @param {number} opts.port - The port of the service. + * @return {Promise} A promise which resolves to a gRPC-fallback service stub, which is a protobuf.js service stub instance modified to match the gRPC stub API + */ + async createStub(service, opts, + // For consistency with createStub in grpc.ts, customServicePath is defined: + // eslint-disable-next-line @typescript-eslint/no-unused-vars + customServicePath) { + if (!this.authClient) { + if (this.auth && 'getClient' in this.auth) { + this.authClient = (await this.auth.getClient()); + } + else if (this.auth && 'getRequestHeaders' in this.auth) { + this.authClient = this.auth; + } + } + if (!this.authClient) { + throw new Error('No authentication was provided'); + } + if (!opts.universeDomain) { + opts.universeDomain = 'googleapis.com'; + } + if (opts.universeDomain) { + const universeFromAuth = this.authClient.universeDomain; + if (universeFromAuth && opts.universeDomain !== universeFromAuth) { + throw new Error(`The configured universe domain (${opts.universeDomain}) does not match the universe domain found in the credentials (${universeFromAuth}). 
` + + "If you haven't configured the universe domain explicitly, googleapis.com is the default."); + } + } + service.resolveAll(); + const methods = GrpcClient.getServiceMethods(service); + const protocol = opts.protocol || 'https'; + let servicePath = opts.servicePath; + if (!servicePath && + service.options && + service.options['(google.api.default_host)']) { + servicePath = service.options['(google.api.default_host)']; + } + if (!servicePath) { + throw new Error(`Cannot determine service API path for service ${service.name}.`); + } + let servicePort; + const match = servicePath.match(/^(.*):(\d+)$/); + if (match) { + servicePath = match[1]; + servicePort = parseInt(match[2]); + } + if (opts.port) { + servicePort = opts.port; + } + else if (!servicePort) { + servicePort = 443; + } + const encoder = fallbackRest.encodeRequest; + const decoder = fallbackRest.decodeResponse; + const serviceStub = (0, fallbackServiceStub_1.generateServiceStub)(methods, protocol, servicePath, servicePort, this.authClient, encoder, decoder, this.numericEnums); + return serviceStub; + } + /** + * Creates a 'bytelength' function for a given proto message class. + * + * See {@link BundleDescriptor} about the meaning of the return value. + * + * @param {function} message - a constructor function that is generated by + * protobuf.js. Assumes 'encoder' field in the message. + * @return {function(Object):number} - a function to compute the byte length + * for an object. + */ + static createByteLengthFunction(message) { + return gax.createByteLengthFunction(message); + } +} +exports.GrpcClient = GrpcClient; +GrpcClient.protoCache = new Map(); +/** + * gRPC-fallback version of lro + * + * @param {Object=} options.auth - An instance of google-auth-library. + * @return {Object} A OperationsClientBuilder that will return a OperationsClient + */ +function lro(options) { + options = Object.assign({ scopes: [] }, options); + if (options.protoJson) { + options = Object.assign(options, { fallback: true }); + } + const gaxGrpc = new GrpcClient(options); + return new operationsClient_1.OperationsClientBuilder(gaxGrpc, options.protoJson); +} +/** + * gRPC-fallback version of createApiCall + * + * Converts an rpc call into an API call governed by the settings. + * + * In typical usage, `func` will be a promise to a callable used to make an rpc + * request. This will mostly likely be a bound method from a request stub used + * to make an rpc call. It is not a direct function but a Promise instance, + * because of its asynchronism (typically, obtaining the auth information). + * + * The result is a function which manages the API call with the given settings + * and the options on the invocation. + * + * Throws exception on unsupported streaming calls + * + * @param {Promise|GRPCCall} func - is either a promise to be used to make + * a bare RPC call, or just a bare RPC call. + * @param {CallSettings} settings - provides the settings for this call + * @param {Descriptor} descriptor - optionally specify the descriptor for + * the method call. + * @return {GaxCall} func - a bound method on a request stub used + * to make an rpc call. 
+ */ +function createApiCall(func, settings, descriptor, +// eslint-disable-next-line @typescript-eslint/no-unused-vars +_fallback // unused; for compatibility only +) { + if (descriptor && + 'streaming' in descriptor && + descriptor.type !== streaming_1.StreamType.SERVER_STREAMING) { + return () => { + throw new Error('The REST transport currently does not support client-streaming or bidi-stream calls.'); + }; + } + if (descriptor && 'streaming' in descriptor && !(0, featureDetection_1.isNodeJS)()) { + return () => { + throw new Error('Server streaming over the REST transport is only supported in Node.js.'); + }; + } + return (0, createApiCall_1.createApiCall)(func, settings, descriptor); +} +exports.protobufMinimal = __nccwpck_require__(96916); +var warnings_1 = __nccwpck_require__(16328); +Object.defineProperty(exports, "warn", ({ enumerable: true, get: function () { return warnings_1.warn; } })); +var longrunning_1 = __nccwpck_require__(83481); +Object.defineProperty(exports, "Operation", ({ enumerable: true, get: function () { return longrunning_1.Operation; } })); +Object.defineProperty(exports, "operation", ({ enumerable: true, get: function () { return longrunning_1.operation; } })); +var googleError_1 = __nccwpck_require__(6634); +Object.defineProperty(exports, "GoogleError", ({ enumerable: true, get: function () { return googleError_1.GoogleError; } })); +// Different environments or bundlers may or may not respect "browser" field +// in package.json (e.g. Electron does not respect it, but if you run the code +// through webpack first, it will follow the "browser" field). +// To make it safer and more compatible, let's make sure that if you do +// const gax = require("google-gax"); +// you can always ask for gax.fallback, regardless of "browser" field being +// understood or not. +const fallback = module.exports; +exports.fallback = fallback; +//# sourceMappingURL=fallback.js.map + +/***/ }), + +/***/ 21088: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; + +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.encodeRequest = encodeRequest; +exports.decodeResponse = decodeResponse; +// proto-over-HTTP request encoding and decoding +const serializer = __nccwpck_require__(2027); +const fallback_1 = __nccwpck_require__(90418); +const googleError_1 = __nccwpck_require__(6634); +const transcoding_1 = __nccwpck_require__(86707); +function encodeRequest(rpc, protocol, servicePath, servicePort, request, numericEnums) { + const headers = { + 'Content-Type': 'application/json', + }; + const message = rpc.resolvedRequestType.fromObject(request); + const json = serializer.toProto3JSON(message, { + numericEnums, + }); + if (!json) { + throw new Error(`Cannot send null request to RPC ${rpc.name}.`); + } + if (typeof json !== 'object' || Array.isArray(json)) { + throw new Error(`Request to RPC ${rpc.name} must be an object.`); + } + const transcoded = (0, transcoding_1.transcode)(json, rpc.parsedOptions); + if (!transcoded) { + throw new Error(`Cannot build HTTP request for ${JSON.stringify(json)}, method: ${rpc.name}`); + } + // If numeric enums feature is requested, add extra parameter to the query string + if (numericEnums) { + transcoded.queryString = + (transcoded.queryString ? `${transcoded.queryString}&` : '') + + '$alt=json%3Benum-encoding=int'; + } + // Converts httpMethod to method that permitted in standard Fetch API spec + // https://fetch.spec.whatwg.org/#methods + const method = transcoded.httpMethod.toUpperCase(); + const body = JSON.stringify(transcoded.data); + const url = `${protocol}://${servicePath}:${servicePort}/${transcoded.url.replace(/^\//, '')}?${transcoded.queryString}`; + return { + method, + url, + headers, + body, + }; +} +function decodeResponse(rpc, ok, response) { + // eslint-disable-next-line n/no-unsupported-features/node-builtins + const decodedString = new TextDecoder().decode(response); + const json = JSON.parse(decodedString); + if (!ok) { + const error = googleError_1.GoogleError.parseHttpError(json); + throw error; + } + const message = serializer.fromProto3JSON(rpc.resolvedResponseType, json); + if (!message) { + throw new Error(`Received null response from RPC ${rpc.name}`); + } + return rpc.resolvedResponseType.toObject(message, fallback_1.defaultToObjectOptions); +} +//# sourceMappingURL=fallbackRest.js.map /***/ }), -/***/ 30774: -/***/ ((__unused_webpack_module, exports) => { +/***/ 38255: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.generateServiceStub = generateServiceStub; +/* global window */ +/* global AbortController */ +const node_fetch_1 = __nccwpck_require__(80467); +const abort_controller_1 = __nccwpck_require__(61659); +const featureDetection_1 = __nccwpck_require__(11154); +const streamArrayParser_1 = __nccwpck_require__(25735); +const stream_1 = __nccwpck_require__(12781); +function generateServiceStub(rpcs, protocol, servicePath, servicePort, authClient, requestEncoder, responseDecoder, numericEnums) { + const fetch = (0, featureDetection_1.hasWindowFetch)() + ? window.fetch + : node_fetch_1.default; + const serviceStub = { + // close method should close all cancel controllers. If this feature request in the future, we can have a cancelControllerFactory that tracks created cancel controllers, and abort them all in close method. + close: () => { + return { cancel: () => { } }; + }, + }; + for (const [rpcName, rpc] of Object.entries(rpcs)) { + serviceStub[rpcName] = (request, options, _metadata, callback) => { + options !== null && options !== void 0 ? options : (options = {}); + // We cannot use async-await in this function because we need to return the canceller object as soon as possible. + // Using plain old promises instead. + let fetchParameters; + try { + fetchParameters = requestEncoder(rpc, protocol, servicePath, servicePort, request, numericEnums); + } + catch (err) { + // we could not encode parameters; pass error to the callback + // and return a no-op canceler object. + if (callback) { + callback(err); + } + return { + cancel() { }, + }; + } + const cancelController = (0, featureDetection_1.hasAbortController)() + ? new AbortController() + : new abort_controller_1.AbortController(); + const cancelSignal = cancelController.signal; + let cancelRequested = false; + const url = fetchParameters.url; + const headers = fetchParameters.headers; + for (const key of Object.keys(options)) { + headers[key] = options[key][0]; + } + const streamArrayParser = new streamArrayParser_1.StreamArrayParser(rpc); + authClient + .getRequestHeaders() + .then(authHeader => { + const fetchRequest = { + headers: { + ...authHeader, + ...headers, + }, + body: fetchParameters.body, + method: fetchParameters.method, + signal: cancelSignal, + }; + if (fetchParameters.method === 'GET' || + fetchParameters.method === 'DELETE') { + delete fetchRequest['body']; + } + return fetch(url, fetchRequest); + }) + .then((response) => { + if (response.ok && rpc.responseStream) { + (0, stream_1.pipeline)(response.body, streamArrayParser, (err) => { + if (err && + (!cancelRequested || + (err instanceof Error && err.name !== 'AbortError'))) { + if (callback) { + callback(err); + } + streamArrayParser.emit('error', err); + } + }); + return; + } + else { + return Promise.all([ + Promise.resolve(response.ok), + response.arrayBuffer(), + ]) + .then(([ok, buffer]) => { + const response = responseDecoder(rpc, ok, buffer); + callback(null, response); + }) + .catch((err) => { + if (!cancelRequested || err.name !== 'AbortError') { + if (rpc.responseStream) { + if (callback) { + callback(err); + } + streamArrayParser.emit('error', err); + } + else if (callback) { + callback(err); + } + else { + throw err; + } + } + }); + } + }) + .catch((err) => { + if (rpc.responseStream) { + if (callback) { + callback(err); + } + streamArrayParser.emit('error', err); + } + else if (callback) { + callback(err); + } + else { + throw err; + } + }); + if (rpc.responseStream) { + return 
streamArrayParser; + } + return { + cancel: () => { + cancelRequested = true; + cancelController.abort(); + }, + }; + }; + } + return serviceStub; +} +//# sourceMappingURL=fallbackServiceStub.js.map /***/ }), -/***/ 14089: +/***/ 11154: /***/ ((__unused_webpack_module, exports) => { "use strict"; +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.hasWindowFetch = hasWindowFetch; +exports.isNodeJS = isNodeJS; +exports.hasAbortController = hasAbortController; +/* global window */ +const features = { + windowFetch: typeof window !== 'undefined' && + (window === null || window === void 0 ? void 0 : window.fetch) && + typeof (window === null || window === void 0 ? void 0 : window.fetch) === 'function', + // eslint-disable-next-line n/no-unsupported-features/node-builtins + textEncoder: typeof TextEncoder !== 'undefined', + // eslint-disable-next-line n/no-unsupported-features/node-builtins + textDecoder: typeof TextDecoder !== 'undefined', + nodeJS: typeof process !== 'undefined' && ((_a = process === null || process === void 0 ? void 0 : process.versions) === null || _a === void 0 ? void 0 : _a.node), + abortController: typeof AbortController !== 'undefined', +}; +function hasWindowFetch() { + return features.windowFetch; +} +function isNodeJS() { + return features.nodeJS; +} +function hasAbortController() { + return features.abortController; +} +//# sourceMappingURL=featureDetection.js.map /***/ }), -/***/ 45678: -/***/ ((__unused_webpack_module, exports) => { +/***/ 80978: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.CallSettings = exports.RetryOptions = void 0; +exports.convertRetryOptions = convertRetryOptions; +exports.createRetryOptions = createRetryOptions; +exports.createBackoffSettings = createBackoffSettings; +exports.createDefaultBackoffSettings = createDefaultBackoffSettings; +exports.createMaxRetriesBackoffSettings = createMaxRetriesBackoffSettings; +exports.createBundleOptions = createBundleOptions; +exports.constructSettings = constructSettings; +exports.createByteLengthFunction = createByteLengthFunction; +const warnings_1 = __nccwpck_require__(16328); +const util_1 = __nccwpck_require__(26969); +const status_1 = __nccwpck_require__(53501); +/** + * Encapsulates the overridable settings for a particular API call. + * + * ``CallOptions`` is an optional arg for all GAX API calls. It is used to + * configure the settings of a specific API call. + * + * When provided, its values override the GAX service defaults for that + * particular call. + * + * Typically the API clients will accept this as the second to the last + * argument. See the examples below. + * @typedef {Object} CallOptions + * @property {number=} timeout - The client-side timeout for API calls. + * @property {RetryOptions=} retry - determines whether and how to retry + * on transient errors. When set to null, the call will not retry. + * @property {boolean=} autoPaginate - If set to false and the call is + * configured for paged iteration, page unrolling is not performed, instead + * the callback will be called with the response object. + * @property {Object=} pageToken - If set and the call is configured for + * paged iteration, paged iteration is not performed and requested with this + * pageToken. + * @property {number} maxResults - If set and the call is configured for + * paged iteration, the call will stop when the number of response elements + * reaches to the specified size. By default, it will unroll the page to + * the end of the list. + * @property {boolean=} isBundling - If set to false and the call is configured + * for bundling, bundling is not performed. + * @property {BackoffSettings=} longrunning - BackoffSettings used for polling. + * @example + * // suppress bundling for bundled method. + * api.bundlingMethod( + * param, {optParam: aValue, isBundling: false}, function(err, response) { + * // handle response. + * }); + * @example + * // suppress streaming for page-streaming method. + * api.pageStreamingMethod( + * param, {optParam: aValue, autoPaginate: false}, function(err, page) { + * // not returning a stream, but callback is called with the paged response. + * }); + */ +/** + * Per-call configurable settings for retrying upon transient failure. + * @implements {RetryOptionsType} + * @typedef {Object} RetryOptions + * @property {number[]} retryCodes + * @property {BackoffSettings} backoffSettings + * @property {(function)} shouldRetryFn + * @property {(function)} getResumptionRequestFn + */ +class RetryOptions { + constructor(retryCodes, backoffSettings, shouldRetryFn, getResumptionRequestFn) { + this.retryCodes = retryCodes; + this.backoffSettings = backoffSettings; + this.shouldRetryFn = shouldRetryFn; + this.getResumptionRequestFn = getResumptionRequestFn; + } +} +exports.RetryOptions = RetryOptions; +class CallSettings { + /** + * @param {Object} settings - An object containing parameters of this settings. + * @param {number} settings.timeout - The client-side timeout for API calls. 
+ * This parameter is ignored for retrying calls. + * @param {RetryOptions} settings.retry - The configuration for retrying upon + * transient error. If set to null, this call will not retry. + * @param {boolean} settings.autoPaginate - If there is no `pageDescriptor`, + * this attrbute has no meaning. Otherwise, determines whether a page + * streamed response should make the page structure transparent to the user by + * flattening the repeated field in the returned generator. + * @param {number} settings.pageToken - If there is no `pageDescriptor`, + * this attribute has no meaning. Otherwise, determines the page token used + * in the page streaming request. + * @param {Object} settings.otherArgs - Additional arguments to be passed to + * the API calls. + * + * @constructor + */ + constructor(settings) { + var _a; + settings = settings || {}; + this.timeout = settings.timeout || 30 * 1000; + this.retry = settings.retry; + this.autoPaginate = + 'autoPaginate' in settings ? settings.autoPaginate : true; + this.maxResults = settings.maxResults; + this.otherArgs = settings.otherArgs || {}; + this.bundleOptions = settings.bundleOptions; + this.isBundling = 'isBundling' in settings ? settings.isBundling : true; + this.longrunning = + 'longrunning' in settings ? settings.longrunning : undefined; + this.apiName = (_a = settings.apiName) !== null && _a !== void 0 ? _a : undefined; + this.retryRequestOptions = settings.retryRequestOptions; + } + /** + * Returns a new CallSettings merged from this and a CallOptions object. + * + * @param {CallOptions} options - an instance whose values override + * those in this object. If null, ``merge`` returns a copy of this + * object + * @return {CallSettings} The merged CallSettings instance. + */ + merge(options) { + if (!options) { + return new CallSettings(this); + } + let timeout = this.timeout; + let retry = this.retry; + let autoPaginate = this.autoPaginate; + let maxResults = this.maxResults; + let otherArgs = this.otherArgs; + let isBundling = this.isBundling; + let longrunning = this.longrunning; + let apiName = this.apiName; + let retryRequestOptions = this.retryRequestOptions; + // If the user provides a timeout to the method, that timeout value will be used + // to override the backoff settings. + if ('timeout' in options) { + timeout = options.timeout; + } + // If a method-specific timeout is set in the service config, and the retry codes for that + // method are non-null, then that timeout value will be used to + // override backoff settings. + if (retry === null || retry === void 0 ? 
void 0 : retry.retryCodes) { + retry.backoffSettings.initialRpcTimeoutMillis = timeout; + retry.backoffSettings.maxRpcTimeoutMillis = timeout; + retry.backoffSettings.totalTimeoutMillis = timeout; + } + if ('retry' in options) { + retry = mergeRetryOptions(retry || {}, options.retry); + } + if ('autoPaginate' in options && !options.autoPaginate) { + autoPaginate = false; + } + if ('maxResults' in options) { + maxResults = options.maxResults; + } + if ('otherArgs' in options) { + otherArgs = {}; + for (const key in this.otherArgs) { + otherArgs[key] = this.otherArgs[key]; + } + for (const optionsKey in options.otherArgs) { + otherArgs[optionsKey] = options.otherArgs[optionsKey]; + } + } + if ('isBundling' in options) { + isBundling = options.isBundling; + } + if ('maxRetries' in options && options.maxRetries !== undefined) { + retry.backoffSettings.maxRetries = options.maxRetries; + delete retry.backoffSettings.totalTimeoutMillis; + } + if ('longrunning' in options) { + longrunning = options.longrunning; + } + if ('apiName' in options) { + apiName = options.apiName; + } + if ('retryRequestOptions' in options) { + retryRequestOptions = options.retryRequestOptions; + } + return new CallSettings({ + timeout, + retry, + bundleOptions: this.bundleOptions, + longrunning, + autoPaginate, + maxResults, + otherArgs, + isBundling, + apiName, + retryRequestOptions, + }); + } +} +exports.CallSettings = CallSettings; +/** + * Validates passed retry options in preparation for eventual parameter deprecation + * converts retryRequestOptions to retryOptions + * then sets retryRequestOptions to null + * + * @param {CallOptions} options - a list of passed retry option + * @return {CallOptions} A new CallOptions object. + * + */ +function convertRetryOptions(options, gaxStreamingRetries) { + var _a, _b, _c, _d; + // options will be undefined if no CallOptions object is passed at call time + if (!options) { + return options; + } + // if a user provided retry AND retryRequestOptions at call time, throw an error + // otherwise, convert supported parameters + if (!gaxStreamingRetries) { + return options; + } + if (options.retry && options.retryRequestOptions) { + throw new Error('Only one of retry or retryRequestOptions may be set'); + } // handles parameter conversion from retryRequestOptions to retryOptions + if (options.retryRequestOptions) { + if (options.retryRequestOptions.objectMode !== undefined) { + (0, warnings_1.warn)('retry_request_options', 'objectMode override is not supported. It is set to true internally by default in gax.', 'UnsupportedParameterWarning'); + } + if (options.retryRequestOptions.noResponseRetries !== undefined) { + (0, warnings_1.warn)('retry_request_options', 'noResponseRetries override is not supported. Please specify retry codes or a function to determine retry eligibility.', 'UnsupportedParameterWarning'); + } + if (options.retryRequestOptions.currentRetryAttempt !== undefined) { + (0, warnings_1.warn)('retry_request_options', 'currentRetryAttempt override is not supported. Retry attempts are tracked internally.', 'UnsupportedParameterWarning'); + } + let retryCodes = [status_1.Status.UNAVAILABLE]; + let shouldRetryFn; + if (options.retryRequestOptions.shouldRetryFn) { + retryCodes = []; + shouldRetryFn = options.retryRequestOptions.shouldRetryFn; + } + //Backoff settings + options.maxRetries = + (_b = (_a = options === null || options === void 0 ? void 0 : options.retryRequestOptions) === null || _a === void 0 ? void 0 : _a.retries) !== null && _b !== void 0 ? 
_b : options.maxRetries; + // create a default backoff settings object in case the user didn't provide overrides for everything + const backoffSettings = createDefaultBackoffSettings(); + let maxRetryDelayMillis; + let totalTimeoutMillis; + // maxRetryDelay - this is in seconds, need to convert to milliseconds + if (options.retryRequestOptions.maxRetryDelay !== undefined) { + maxRetryDelayMillis = options.retryRequestOptions.maxRetryDelay * 1000; + } + // retryDelayMultiplier - should be a one to one mapping to retryDelayMultiplier + const retryDelayMultiplier = (_d = (_c = options === null || options === void 0 ? void 0 : options.retryRequestOptions) === null || _c === void 0 ? void 0 : _c.retryDelayMultiplier) !== null && _d !== void 0 ? _d : backoffSettings.retryDelayMultiplier; + // this is in seconds and needs to be converted to milliseconds and the totalTimeoutMillis parameter + if (options.retryRequestOptions.totalTimeout !== undefined) { + totalTimeoutMillis = options.retryRequestOptions.totalTimeout * 1000; + } + // for the variables the user wants to override, override in the backoff settings object we made + backoffSettings.maxRetryDelayMillis = + maxRetryDelayMillis !== null && maxRetryDelayMillis !== void 0 ? maxRetryDelayMillis : backoffSettings.maxRetryDelayMillis; + backoffSettings.retryDelayMultiplier = + retryDelayMultiplier !== null && retryDelayMultiplier !== void 0 ? retryDelayMultiplier : backoffSettings.retryDelayMultiplier; + backoffSettings.totalTimeoutMillis = + totalTimeoutMillis !== null && totalTimeoutMillis !== void 0 ? totalTimeoutMillis : backoffSettings.totalTimeoutMillis; + const convertedRetryOptions = createRetryOptions(retryCodes, backoffSettings, shouldRetryFn); + options.retry = convertedRetryOptions; + delete options.retryRequestOptions; // completely remove them to avoid any further confusion + (0, warnings_1.warn)('retry_request_options', 'retryRequestOptions will be deprecated in a future release. Please use retryOptions to pass retry options at call time', 'DeprecationWarning'); + } + return options; +} +/** + * Per-call configurable settings for retrying upon transient failure. + * @param {number[]} retryCodes - a list of Google API canonical error codes OR a function that returns a boolean to determine retry behavior + * upon which a retry should be attempted. + * @param {BackoffSettings} backoffSettings - configures the retry + * exponential backoff algorithm. + * @param {function} shouldRetryFn - a function that determines whether a call should retry. If this is defined retryCodes must be empty + * @param {function} getResumptionRequestFn - a function with a resumption strategy - only used with server streaming retries + * @return {RetryOptions} A new RetryOptions object. + * + */ +function createRetryOptions(retryCodes, backoffSettings, shouldRetryFn, getResumptionRequestFn) { + return { + retryCodes, + backoffSettings, + shouldRetryFn, + getResumptionRequestFn, + }; +} +/** + * Parameters to the exponential backoff algorithm for retrying. + * + * @param {number} initialRetryDelayMillis - the initial delay time, + * in milliseconds, between the completion of the first failed request and the + * initiation of the first retrying request. + * @param {number} retryDelayMultiplier - the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @param {number} maxRetryDelayMillis - the maximum delay time, in + * milliseconds, between requests. 
When this value is reached, + * ``retryDelayMultiplier`` will no longer be used to increase delay time. + * @param {number} initialRpcTimeoutMillis - the initial timeout parameter + * to the request. + * @param {number} rpcTimeoutMultiplier - the multiplier by which to + * increase the timeout parameter between failed requests. + * @param {number} maxRpcTimeoutMillis - the maximum timeout parameter, in + * milliseconds, for a request. When this value is reached, + * ``rpcTimeoutMultiplier`` will no longer be used to increase the timeout. + * @param {number} totalTimeoutMillis - the total time, in milliseconds, + * starting from when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @return {BackoffSettings} a new settings. + * + */ +function createBackoffSettings(initialRetryDelayMillis, retryDelayMultiplier, maxRetryDelayMillis, initialRpcTimeoutMillis, rpcTimeoutMultiplier, maxRpcTimeoutMillis, totalTimeoutMillis) { + return { + initialRetryDelayMillis, + retryDelayMultiplier, + maxRetryDelayMillis, + initialRpcTimeoutMillis, + rpcTimeoutMultiplier, + maxRpcTimeoutMillis, + totalTimeoutMillis, + }; +} +function createDefaultBackoffSettings() { + return createBackoffSettings(100, 1.3, 60000, null, null, null, null); +} +/** + * Parameters to the exponential backoff algorithm for retrying. + * This function is unsupported, and intended for internal use only. + * + * @param {number} initialRetryDelayMillis - the initial delay time, + * in milliseconds, between the completion of the first failed request and the + * initiation of the first retrying request. + * @param {number} retryDelayMultiplier - the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @param {number} maxRetryDelayMillis - the maximum delay time, in + * milliseconds, between requests. When this value is reached, + * ``retryDelayMultiplier`` will no longer be used to increase delay time. + * @param {number} initialRpcTimeoutMillis - the initial timeout parameter + * to the request. + * @param {number} rpcTimeoutMultiplier - the multiplier by which to + * increase the timeout parameter between failed requests. + * @param {number} maxRpcTimeoutMillis - the maximum timeout parameter, in + * milliseconds, for a request. When this value is reached, + * ``rpcTimeoutMultiplier`` will no longer be used to increase the timeout. + * @param {number} maxRetries - the maximum number of retrying attempts that + * will be made. If reached, an error will be returned. + * @return {BackoffSettings} a new settings. + * + */ +function createMaxRetriesBackoffSettings(initialRetryDelayMillis, retryDelayMultiplier, maxRetryDelayMillis, initialRpcTimeoutMillis, rpcTimeoutMultiplier, maxRpcTimeoutMillis, maxRetries) { + return { + initialRetryDelayMillis, + retryDelayMultiplier, + maxRetryDelayMillis, + initialRpcTimeoutMillis, + rpcTimeoutMultiplier, + maxRpcTimeoutMillis, + maxRetries, + }; +} +/** + * Creates a new {@link BundleOptions}. + * + * @private + * @param {Object} options - An object to hold optional parameters. See + * properties for the content of options. + * @return {BundleOptions} - A new options. 
+ */ +function createBundleOptions(options) { + const params = [ + 'element_count_threshold', + 'element_count_limit', + 'request_byte_threshold', + 'request_byte_limit', + 'delay_threshold_millis', + ]; + params.forEach(param => { + if (param in options && typeof options[param] !== 'number') { + throw new Error(`${param} should be a number`); + } + }); + const elementCountThreshold = options.element_count_threshold || 0; + const elementCountLimit = options.element_count_limit || 0; + const requestByteThreshold = options.request_byte_threshold || 0; + const requestByteLimit = options.request_byte_limit || 0; + const delayThreshold = options.delay_threshold_millis || 0; + if (elementCountThreshold === 0 && + requestByteThreshold === 0 && + delayThreshold === 0) { + throw new Error('one threshold should be > 0'); + } + return { + elementCountThreshold, + elementCountLimit, + requestByteThreshold, + requestByteLimit, + delayThreshold, + }; +} +/** + * Helper for {@link constructSettings} + * + * @private + * + * @param {Object} methodConfig - A dictionary representing a single + * `methods` entry of the standard API client config file. (See + * {@link constructSettings} for information on this yaml.) + * @param {?Object} retryCodes - A dictionary parsed from the + * `retry_codes_def` entry of the standard API client config + * file. (See {@link constructSettings} for information on this yaml.) + * @param {Object} retryParams - A dictionary parsed from the + * `retry_params` entry of the standard API client config + * file. (See {@link constructSettings} for information on this yaml.) + * @param {Object} retryNames - A dictionary mapping the string names + * used in the standard API client config file to API response + * status codes. + * @return {?RetryOptions} The new retry options. + */ +function constructRetry(methodConfig, retryCodes, retryParams, retryNames) { + if (!methodConfig) { + return null; + } + let codes = null; // this is one instance where it will NOT be an array OR a function because we do not allow shouldRetryFn in the client + if (retryCodes && 'retry_codes_name' in methodConfig) { + const retryCodesName = methodConfig['retry_codes_name']; + codes = (retryCodes[retryCodesName] || []).map(name => { + return Number(retryNames[name]); + }); + } + let backoffSettings = null; + if (retryParams && 'retry_params_name' in methodConfig) { + const params = retryParams[methodConfig.retry_params_name]; + backoffSettings = createBackoffSettings(params.initial_retry_delay_millis, params.retry_delay_multiplier, params.max_retry_delay_millis, params.initial_rpc_timeout_millis, params.rpc_timeout_multiplier, params.max_rpc_timeout_millis, params.total_timeout_millis); + } + return createRetryOptions(codes, backoffSettings); +} +/** + * Helper for {@link constructSettings} + * + * Takes two retry options, and merges them into a single RetryOption instance. + * + * @private + * + * @param {RetryOptions} retry - The base RetryOptions. + * @param {RetryOptions} overrides - The RetryOptions used for overriding + * `retry`. Use the values if it is not null. If entire `overrides` is null, + * ignore the base retry and return null. + * @return {?RetryOptions} The merged RetryOptions. + */ +function mergeRetryOptions(retry, overrides) { + if (!overrides) { + return null; + } + if (!overrides.retryCodes && + !overrides.backoffSettings && + !overrides.shouldRetryFn && + !overrides.getResumptionRequestFn) { + return retry; + } + const retryCodes = overrides.retryCodes + ? 
overrides.retryCodes + : retry.retryCodes; + const backoffSettings = overrides.backoffSettings + ? overrides.backoffSettings + : retry.backoffSettings; + const shouldRetryFn = overrides.shouldRetryFn + ? overrides.shouldRetryFn + : retry.shouldRetryFn; + const getResumptionRequestFn = overrides.getResumptionRequestFn + ? overrides.getResumptionRequestFn + : retry.getResumptionRequestFn; + return createRetryOptions(retryCodes, backoffSettings, shouldRetryFn, getResumptionRequestFn); +} +/** + * Constructs a dictionary mapping method names to {@link CallSettings}. + * + * The `clientConfig` parameter is parsed from a client configuration JSON + * file of the form: + * + * { + * "interfaces": { + * "google.fake.v1.ServiceName": { + * "retry_codes": { + * "idempotent": ["UNAVAILABLE", "DEADLINE_EXCEEDED"], + * "non_idempotent": [] + * }, + * "retry_params": { + * "default": { + * "initial_retry_delay_millis": 100, + * "retry_delay_multiplier": 1.2, + * "max_retry_delay_millis": 1000, + * "initial_rpc_timeout_millis": 2000, + * "rpc_timeout_multiplier": 1.5, + * "max_rpc_timeout_millis": 30000, + * "total_timeout_millis": 45000 + * } + * }, + * "methods": { + * "CreateFoo": { + * "retry_codes_name": "idempotent", + * "retry_params_name": "default" + * }, + * "Publish": { + * "retry_codes_name": "non_idempotent", + * "retry_params_name": "default", + * "bundling": { + * "element_count_threshold": 40, + * "element_count_limit": 200, + * "request_byte_threshold": 90000, + * "request_byte_limit": 100000, + * "delay_threshold_millis": 100 + * } + * } + * } + * } + * } + * } + * + * @param {String} serviceName - The fully-qualified name of this + * service, used as a key into the client config file (in the + * example above, this value should be 'google.fake.v1.ServiceName'). + * @param {Object} clientConfig - A dictionary parsed from the + * standard API client config file. + * @param {Object} configOverrides - A dictionary in the same structure of + * client_config to override the settings. + * @param {Object.} retryNames - A dictionary mapping the strings + * referring to response status codes to objects representing + * those codes. + * @param {Object} otherArgs - the non-request arguments to be passed to the API + * calls. + * @return {Object} A mapping from method name to CallSettings, or null if the + * service is not found in the config. + */ +function constructSettings(serviceName, clientConfig, configOverrides, retryNames, otherArgs) { + otherArgs = otherArgs || {}; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const defaults = {}; + const serviceConfig = (clientConfig.interfaces || {})[serviceName]; + if (!serviceConfig) { + return null; + } + // users can override the config from client side, like bundling options. 
+ // The detailed structure of the clientConfig can be found here: https://github.com/googleapis/gax-nodejs/blob/main/src/gax.ts#L546 + // The way to override bundling options: + // + // const customConfig = {"interfaces": {"service": {"methods": {"methodName": {"bundling": {..}}}}}} + // const client = new Client({ projectId, customConfig }); + const overrides = (configOverrides.interfaces || {})[serviceName] || {}; + const methods = serviceConfig.methods; + const overridingMethods = overrides.methods || {}; + for (const methodName in methods) { + const methodConfig = methods[methodName]; + const jsName = (0, util_1.toLowerCamelCase)(methodName); + let retry = constructRetry(methodConfig, serviceConfig.retry_codes, serviceConfig.retry_params, retryNames); + let bundlingConfig = methodConfig.bundling; + let timeout = methodConfig.timeout_millis; + if (methodName in overridingMethods) { + const overridingMethod = overridingMethods[methodName]; + if (overridingMethod) { + if ('bundling' in overridingMethod) { + bundlingConfig = overridingMethod.bundling; + } + if ('timeout_millis' in overridingMethod) { + timeout = overridingMethod.timeout_millis; + } + } + retry = mergeRetryOptions(retry, constructRetry(overridingMethod, overrides.retry_codes, overrides.retry_params, retryNames)); + } + const apiName = serviceName; + defaults[jsName] = new CallSettings({ + timeout, + retry, + bundleOptions: bundlingConfig + ? createBundleOptions(bundlingConfig) + : null, + otherArgs, + apiName, + }); + } + return defaults; +} +function createByteLengthFunction(message) { + return function getByteLength(obj) { + try { + return message.encode(obj).finish().length; + } + catch (err) { + const stringified = JSON.stringify(obj); + (0, warnings_1.warn)('error_encoding_protobufjs_object', `Cannot encode protobuf.js object: ${stringified}: ${err}`); + // We failed to encode the object properly, let's just return an upper boundary of its length. + // It's only needed for calculating the size of the batch, so it's safe if it's bigger than needed. + return stringified.length; + } + }; +} +//# sourceMappingURL=gax.js.map /***/ }), -/***/ 69926: -/***/ ((__unused_webpack_module, exports) => { +/***/ 6634: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.GoogleErrorDecoder = exports.GoogleError = void 0; +const status_1 = __nccwpck_require__(53501); +const protobuf = __nccwpck_require__(85881); +const serializer = __nccwpck_require__(2027); +const fallback_1 = __nccwpck_require__(90418); +class GoogleError extends Error { + // Parse details field in google.rpc.status wire over gRPC medatadata. + // Promote google.rpc.ErrorInfo if exist. 
+ static parseGRPCStatusDetails(err) { + const decoder = new GoogleErrorDecoder(); + try { + if (err.metadata && err.metadata.get('grpc-status-details-bin')) { + const statusDetailsObj = decoder.decodeGRPCStatusDetails(err.metadata.get('grpc-status-details-bin')); + if (statusDetailsObj && + statusDetailsObj.details && + statusDetailsObj.details.length > 0) { + err.statusDetails = statusDetailsObj.details; + } + if (statusDetailsObj && statusDetailsObj.errorInfo) { + err.reason = statusDetailsObj.errorInfo.reason; + err.domain = statusDetailsObj.errorInfo.domain; + err.errorInfoMetadata = statusDetailsObj.errorInfo.metadata; + } + } + } + catch (decodeErr) { + // ignoring the error + } + return err; + } + // Parse http JSON error and promote google.rpc.ErrorInfo if exist. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + static parseHttpError(json) { + if (Array.isArray(json)) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + json = json.find((obj) => { + return 'error' in obj; + }); + } + // fallback logic. + // related issue: https://github.com/googleapis/gax-nodejs/issues/1303 + // google error mapping: https://cloud.google.com/apis/design/errors + // if input json doesn't have 'error' fields, wrap the whole object with 'error' field + if (!json['error']) { + json['error'] = {}; + Object.keys(json) + .filter(key => key !== 'error') + .forEach(key => { + json['error'][key] = json[key]; + delete json[key]; + }); + } + const decoder = new GoogleErrorDecoder(); + const proto3Error = decoder.decodeHTTPError(json['error']); + const error = Object.assign(new GoogleError(json['error']['message']), proto3Error); + // Map Http Status Code to gRPC Status Code + if (json['error']['code']) { + error.code = (0, status_1.rpcCodeFromHttpStatusCode)(json['error']['code']); + } + else { + // If error code is absent, proto3 message default value is 0. We should + // keep error code as undefined. + delete error.code; + } + // Keep consistency with gRPC statusDetails fields. gRPC details has been occupied before. + // Rename "details" to "statusDetails". + if (error.details) { + try { + const statusDetailsObj = decoder.decodeHttpStatusDetails(error.details); + if (statusDetailsObj && + statusDetailsObj.details && + statusDetailsObj.details.length > 0) { + error.statusDetails = statusDetailsObj.details; + } + if (statusDetailsObj && statusDetailsObj.errorInfo) { + error.reason = statusDetailsObj.errorInfo.reason; + error.domain = statusDetailsObj.errorInfo.domain; + // error.metadata has been occupied for gRPC metadata, so we use + // errorInfoMetadata to represent ErrorInfo' metadata field. Keep + // consistency with gRPC ErrorInfo metadata field name. 
+ error.errorInfoMetadata = statusDetailsObj.errorInfo.metadata; + } + } + catch (decodeErr) { + // ignoring the error + } + } + return error; + } +} +exports.GoogleError = GoogleError; +class GoogleErrorDecoder { + constructor() { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const errorProtoJson = __nccwpck_require__(54404); + this.root = protobuf.Root.fromJSON(errorProtoJson); + this.anyType = this.root.lookupType('google.protobuf.Any'); + this.statusType = this.root.lookupType('google.rpc.Status'); + } + decodeProtobufAny(anyValue) { + const match = anyValue.type_url.match(/^type.googleapis.com\/(.*)/); + if (!match) { + throw new Error(`Unknown type encoded in google.protobuf.any: ${anyValue.type_url}`); + } + const typeName = match[1]; + const type = this.root.lookupType(typeName); + if (!type) { + throw new Error(`Cannot lookup type ${typeName}`); + } + return type.decode(anyValue.value); + } + // Decodes gRPC-fallback error which is an instance of google.rpc.Status. + decodeRpcStatus(buffer) { + const uint8array = new Uint8Array(buffer); + const status = this.statusType.decode(uint8array); + // google.rpc.Status contains an array of google.protobuf.Any + // which need a special treatment + const details = []; + let errorInfo; + for (const detail of status.details) { + try { + const decodedDetail = this.decodeProtobufAny(detail); + details.push(decodedDetail); + if (detail.type_url === 'type.googleapis.com/google.rpc.ErrorInfo') { + errorInfo = decodedDetail; + } + } + catch (err) { + // cannot decode detail, likely because of the unknown type - just skip it + } + } + const result = { + code: status.code, + message: status.message, + statusDetails: details, + reason: errorInfo === null || errorInfo === void 0 ? void 0 : errorInfo.reason, + domain: errorInfo === null || errorInfo === void 0 ? void 0 : errorInfo.domain, + errorInfoMetadata: errorInfo === null || errorInfo === void 0 ? void 0 : errorInfo.metadata, + }; + return result; + } + // Construct an Error from a StatusObject. + // Adapted from https://github.com/grpc/grpc-node/blob/main/packages/grpc-js/src/call.ts#L79 + callErrorFromStatus(status) { + status.message = `${status.code} ${status_1.Status[status.code]}: ${status.message}`; + return Object.assign(new GoogleError(status.message), status); + } + // Decodes gRPC-fallback error which is an instance of google.rpc.Status, + // and puts it into the object similar to gRPC ServiceError object. + decodeErrorFromBuffer(buffer) { + return this.callErrorFromStatus(this.decodeRpcStatus(buffer)); + } + // Decodes gRPC metadata error details which is an instance of google.rpc.Status. + decodeGRPCStatusDetails(bufferArr) { + const details = []; + let errorInfo; + bufferArr.forEach(buffer => { + const uint8array = new Uint8Array(buffer); + const rpcStatus = this.statusType.decode(uint8array); + for (const detail of rpcStatus.details) { + try { + const decodedDetail = this.decodeProtobufAny(detail); + details.push(decodedDetail); + if (detail.type_url === 'type.googleapis.com/google.rpc.ErrorInfo') { + errorInfo = decodedDetail; + } + } + catch (err) { + // cannot decode detail, likely because of the unknown type - just skip it + } + } + }); + const result = { + details, + errorInfo, + }; + return result; + } + // Decodes http error which is an instance of google.rpc.Status. 
+ decodeHTTPError(json) { + const errorMessage = serializer.fromProto3JSON(this.statusType, json); + if (!errorMessage) { + throw new Error(`Received error message ${json}, but failed to serialize as proto3 message`); + } + return this.statusType.toObject(errorMessage, fallback_1.defaultToObjectOptions); + } + // Decodes http error details which is an instance of Array. + decodeHttpStatusDetails(rawDetails) { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const details = []; + let errorInfo; + for (const detail of rawDetails) { + try { + const decodedDetail = this.decodeProtobufAny(detail); + details.push(decodedDetail); + if (detail.type_url === 'type.googleapis.com/google.rpc.ErrorInfo') { + errorInfo = decodedDetail; + } + } + catch (err) { + // cannot decode detail, likely because of the unknown type - just skip it + } + } + return { details, errorInfo }; + } +} +exports.GoogleErrorDecoder = GoogleErrorDecoder; +//# sourceMappingURL=googleError.js.map /***/ }), -/***/ 9945: -/***/ ((__unused_webpack_module, exports) => { +/***/ 68976: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.GoogleProtoFilesRoot = exports.GrpcClient = exports.ClientStub = void 0; +const grpcProtoLoader = __nccwpck_require__(98171); +const child_process_1 = __nccwpck_require__(32081); +const fs = __nccwpck_require__(57147); +const google_auth_library_1 = __nccwpck_require__(20810); +const grpc = __nccwpck_require__(7025); +const os = __nccwpck_require__(22037); +const path_1 = __nccwpck_require__(71017); +const path = __nccwpck_require__(71017); +const protobuf = __nccwpck_require__(85881); +const objectHash = __nccwpck_require__(24856); +const gax = __nccwpck_require__(80978); +const googleProtoFilesDir = __nccwpck_require__.ab + "protos"; +// INCLUDE_DIRS is passed to @grpc/proto-loader +const INCLUDE_DIRS = []; +INCLUDE_DIRS.push(googleProtoFilesDir); +// COMMON_PROTO_FILES logic is here for protobufjs loads (see +// GoogleProtoFilesRoot below) +const commonProtoFiles = __nccwpck_require__(41378); +// use the correct path separator for the OS we are running on +const COMMON_PROTO_FILES = commonProtoFiles.map(file => file.replace(/[/\\]/g, path.sep)); +/* + * Async version of readFile. + * + * @returns {Promise} Contents of file at path. + */ +async function readFileAsync(path) { + return new Promise((resolve, reject) => { + fs.readFile(path, 'utf8', (err, content) => { + if (err) + return reject(err); + else + resolve(content); + }); + }); +} +/* + * Async version of execFile. + * + * @returns {Promise} stdout from command execution. 
+ */ +async function execFileAsync(command, args) { + return new Promise((resolve, reject) => { + (0, child_process_1.execFile)(command, args, (err, stdout) => { + if (err) + return reject(err); + else + resolve(stdout); + }); + }); +} +class ClientStub extends grpc.Client { +} +exports.ClientStub = ClientStub; +class GrpcClient { + /** + * Key for proto cache map. We are doing our best to make sure we respect + * the options, so if the same proto file is loaded with different set of + * options, the cache won't be used. Since some of the options are + * Functions (e.g. `enums: String` - see below in `loadProto()`), + * they will be omitted from the cache key. If the cache breaks anything + * for you, use the `ignoreCache` parameter of `loadProto()` to disable it. + */ + static protoCacheKey(filename, options) { + if (!filename || + (Array.isArray(filename) && (filename.length === 0 || !filename[0]))) { + return undefined; + } + return JSON.stringify(filename) + ' ' + JSON.stringify(options); + } + /** + * In rare cases users might need to deallocate all memory consumed by loaded protos. + * This method will delete the proto cache content. + */ + static clearProtoCache() { + GrpcClient.protoCache.clear(); + } + /** + * A class which keeps the context of gRPC and auth for the gRPC. + * + * @param {Object=} options - The optional parameters. It will be directly + * passed to google-auth-library library, so parameters like keyFile or + * credentials will be valid. + * @param {Object=} options.auth - An instance of google-auth-library. + * When specified, this auth instance will be used instead of creating + * a new one. + * @param {Object=} options.grpc - When specified, this will be used + * for the 'grpc' module in this context. By default, it will load the grpc + * module in the standard way. + * @constructor + */ + constructor(options = {}) { + var _a; + this.auth = options.auth || new google_auth_library_1.GoogleAuth(options); + this.fallback = false; + const minimumVersion = 10; + const major = Number((_a = process.version.match(/^v(\d+)/)) === null || _a === void 0 ? void 0 : _a[1]); + if (Number.isNaN(major) || major < minimumVersion) { + const errorMessage = `Node.js v${minimumVersion}.0.0 is a minimum requirement. To learn about legacy version support visit: ` + + 'https://github.com/googleapis/google-cloud-node#supported-nodejs-versions'; + throw new Error(errorMessage); + } + if ('grpc' in options) { + this.grpc = options.grpc; + this.grpcVersion = ''; + } + else { + this.grpc = grpc; + this.grpcVersion = (__nccwpck_require__(56569)/* .version */ .i8); + } + } + /** + * Creates a gRPC credentials. It asks the auth data if necessary. + * @private + * @param {Object} opts - options values for configuring credentials. + * @param {Object=} opts.sslCreds - when specified, this is used instead + * of default channel credentials. + * @return {Promise} The promise which will be resolved to the gRPC credential. + */ + async _getCredentials(opts) { + if (opts.sslCreds) { + return opts.sslCreds; + } + const grpc = this.grpc; + const sslCreds = opts.cert && opts.key + ? 
grpc.credentials.createSsl(null, Buffer.from(opts.key), Buffer.from(opts.cert)) + : grpc.credentials.createSsl(); + const client = await this.auth.getClient(); + const credentials = grpc.credentials.combineChannelCredentials(sslCreds, grpc.credentials.createFromGoogleCredential(client)); + return credentials; + } + static defaultOptions() { + // This set of @grpc/proto-loader options + // 'closely approximates the existing behavior of grpc.load' + const includeDirs = INCLUDE_DIRS.slice(); + const options = { + keepCase: false, + longs: String, + enums: String, + defaults: true, + oneofs: true, + includeDirs, + }; + return options; + } + /** + * Loads the gRPC service from the proto file(s) at the given path and with the + * given options. Caches the loaded protos so the subsequent loads don't do + * any disk reads. + * @param filename The path to the proto file(s). + * @param options Options for loading the proto file. + * @param ignoreCache Defaults to `false`. Set it to `true` if the caching logic + * incorrectly decides that the options object is the same, or if you want to + * re-read the protos from disk for any other reason. + */ + loadFromProto(filename, options, ignoreCache = false) { + const cacheKey = GrpcClient.protoCacheKey(filename, options); + let grpcPackage = cacheKey + ? GrpcClient.protoCache.get(cacheKey) + : undefined; + if (ignoreCache || !grpcPackage) { + const packageDef = grpcProtoLoader.loadSync(filename, options); + grpcPackage = this.grpc.loadPackageDefinition(packageDef); + if (cacheKey) { + GrpcClient.protoCache.set(cacheKey, grpcPackage); + } + } + return grpcPackage; + } + /** + * Load gRPC proto service from a filename looking in googleapis common protos + * when necessary. Caches the loaded protos so the subsequent loads don't do + * any disk reads. + * @param {String} protoPath - The directory to search for the protofile. + * @param {String|String[]} filename - The filename(s) of the proto(s) to be loaded. + * If omitted, protoPath will be treated as a file path to load. + * @param ignoreCache Defaults to `false`. Set it to `true` if the caching logic + * incorrectly decides that the options object is the same, or if you want to + * re-read the protos from disk for any other reason. + * @return {Object} The gRPC loaded result (the toplevel namespace + * object). 
+ */ + loadProto(protoPath, filename, ignoreCache = false) { + if (!filename) { + filename = path.basename(protoPath); + protoPath = path.dirname(protoPath); + } + if (Array.isArray(filename) && filename.length === 0) { + return {}; + } + const options = GrpcClient.defaultOptions(); + options.includeDirs.unshift(protoPath); + return this.loadFromProto(filename, options, ignoreCache); + } + static _resolveFile(protoPath, filename) { + if (fs.existsSync(path.join(protoPath, filename))) { + return path.join(protoPath, filename); + } + else if (COMMON_PROTO_FILES.indexOf(filename) > -1) { + return path.join(googleProtoFilesDir, filename); + } + throw new Error(filename + ' could not be found in ' + protoPath); + } + loadProtoJSON(json, ignoreCache = false) { + const hash = objectHash(JSON.stringify(json)).toString(); + const cached = GrpcClient.protoCache.get(hash); + if (cached && !ignoreCache) { + return cached; + } + const options = GrpcClient.defaultOptions(); + const packageDefinition = grpcProtoLoader.fromJSON(json, options); + const grpcPackage = this.grpc.loadPackageDefinition(packageDefinition); + GrpcClient.protoCache.set(hash, grpcPackage); + return grpcPackage; + } + metadataBuilder(headers) { + const Metadata = this.grpc.Metadata; + const baseMetadata = new Metadata(); + for (const key in headers) { + const value = headers[key]; + if (Array.isArray(value)) { + value.forEach(v => baseMetadata.add(key, v)); + } + else { + baseMetadata.set(key, `${value}`); + } + } + return function buildMetadata(abTests, moreHeaders) { + // TODO: bring the A/B testing info into the metadata. + let copied = false; + let metadata = baseMetadata; + if (moreHeaders) { + for (const key in moreHeaders) { + if (key.toLowerCase() !== 'x-goog-api-client') { + if (!copied) { + copied = true; + metadata = metadata.clone(); + } + const value = moreHeaders[key]; + if (Array.isArray(value)) { + value.forEach(v => metadata.add(key, v)); + } + else { + metadata.set(key, `${value}`); + } + } + } + } + return metadata; + }; + } + /** + * A wrapper of {@link constructSettings} function under the gRPC context. + * + * Most of parameters are common among constructSettings, please take a look. + * @param {string} serviceName - The fullly-qualified name of the service. + * @param {Object} clientConfig - A dictionary of the client config. + * @param {Object} configOverrides - A dictionary of overriding configs. + * @param {Object} headers - A dictionary of additional HTTP header name to + * its value. + * @return {Object} A mapping of method names to CallSettings. + */ + constructSettings(serviceName, clientConfig, configOverrides, headers) { + return gax.constructSettings(serviceName, clientConfig, configOverrides, this.grpc.status, { metadataBuilder: this.metadataBuilder(headers) }); + } + /** + * Creates a gRPC stub with current gRPC and auth. + * @param {function} CreateStub - The constructor function of the stub. + * @param {Object} options - The optional arguments to customize + * gRPC connection. This options will be passed to the constructor of + * gRPC client too. + * @param {string} options.servicePath - The name of the server of the service. + * @param {number} options.port - The port of the service. + * @param {grpcTypes.ClientCredentials=} options.sslCreds - The credentials to be used + * to set up gRPC connection. + * @param {string} defaultServicePath - The default service path. + * @return {Promise} A promise which resolves to a gRPC stub instance. 
+ */ + async createStub(CreateStub, options, customServicePath) { + // The following options are understood by grpc-gcp and need a special treatment + // (should be passed without a `grpc.` prefix) + const grpcGcpOptions = [ + 'grpc.callInvocationTransformer', + 'grpc.channelFactoryOverride', + 'grpc.gcpApiConfig', + ]; + const [cert, key] = await this._detectClientCertificate(options, options.universeDomain); + const servicePath = this._mtlsServicePath(options.servicePath, customServicePath, cert && key); + const opts = Object.assign({}, options, { cert, key, servicePath }); + const serviceAddress = servicePath + ':' + opts.port; + if (!options.universeDomain) { + options.universeDomain = 'googleapis.com'; + } + if (options.universeDomain) { + const universeFromAuth = await this.auth.getUniverseDomain(); + if (universeFromAuth && options.universeDomain !== universeFromAuth) { + throw new Error(`The configured universe domain (${options.universeDomain}) does not match the universe domain found in the credentials (${universeFromAuth}). ` + + "If you haven't configured the universe domain explicitly, googleapis.com is the default."); + } + } + const creds = await this._getCredentials(opts); + const grpcOptions = {}; + // @grpc/grpc-js limits max receive/send message length starting from v0.8.0 + // https://github.com/grpc/grpc-node/releases/tag/%40grpc%2Fgrpc-js%400.8.0 + // To keep the existing behavior and avoid libraries breakage, we pass -1 there as suggested. + grpcOptions['grpc.max_receive_message_length'] = -1; + grpcOptions['grpc.max_send_message_length'] = -1; + grpcOptions['grpc.initial_reconnect_backoff_ms'] = 1000; + Object.keys(opts).forEach(key => { + const value = options[key]; + // the older versions had a bug which required users to call an option + // grpc.grpc.* to make it actually pass to gRPC as grpc.*, let's handle + // this here until the next major release + if (key.startsWith('grpc.grpc.')) { + key = key.replace(/^grpc\./, ''); + } + if (key.startsWith('grpc.')) { + if (grpcGcpOptions.includes(key)) { + key = key.replace(/^grpc\./, ''); + } + grpcOptions[key] = value; + } + if (key.startsWith('grpc-node.')) { + grpcOptions[key] = value; + } + }); + const stub = new CreateStub(serviceAddress, creds, grpcOptions); + return stub; + } + /** + * Detect mTLS client certificate based on logic described in + * https://google.aip.dev/auth/4114. + * + * @param {object} [options] - The configuration object. + * @returns {Promise} Resolves array of strings representing cert and key. + */ + async _detectClientCertificate(opts, universeDomain) { + var _a; + const certRegex = /(?-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----)/s; + const keyRegex = /(?-----BEGIN PRIVATE KEY-----.*?-----END PRIVATE KEY-----)/s; + // If GOOGLE_API_USE_CLIENT_CERTIFICATE is true...: + if (typeof process !== 'undefined' && + ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.GOOGLE_API_USE_CLIENT_CERTIFICATE) === 'true') { + if (universeDomain && universeDomain !== 'googleapis.com') { + throw new Error('mTLS is not supported outside of googleapis.com universe domain.'); + } + if ((opts === null || opts === void 0 ? void 0 : opts.cert) && (opts === null || opts === void 0 ? void 0 : opts.key)) { + return [opts.cert, opts.key]; + } + // If context aware metadata exists, run the cert provider command, + // parse the output to extract cert and key, and use this cert/key. 
+ const metadataPath = (0, path_1.join)(os.homedir(), '.secureConnect', 'context_aware_metadata.json'); + const metadata = JSON.parse(await readFileAsync(metadataPath)); + if (!metadata.cert_provider_command) { + throw Error('no cert_provider_command found'); + } + const stdout = await execFileAsync(metadata.cert_provider_command[0], metadata.cert_provider_command.slice(1)); + const matchCert = stdout.toString().match(certRegex); + const matchKey = stdout.toString().match(keyRegex); + if (!((matchCert === null || matchCert === void 0 ? void 0 : matchCert.groups) && (matchKey === null || matchKey === void 0 ? void 0 : matchKey.groups))) { + throw Error('unable to parse certificate and key'); + } + else { + return [matchCert.groups.cert, matchKey.groups.key]; + } + } + // If GOOGLE_API_USE_CLIENT_CERTIFICATE is not set or false, + // use no cert or key: + return [undefined, undefined]; + } + /** + * Return service path, taking into account mTLS logic. + * See: https://google.aip.dev/auth/4114 + * + * @param {string|undefined} servicePath - The path of the service. + * @param {string|undefined} customServicePath - Did the user provide a custom service URL. + * @param {boolean} hasCertificate - Was a certificate found. + * @returns {string} The DNS address for this service. + */ + _mtlsServicePath(servicePath, customServicePath, hasCertificate) { + var _a, _b; + // If user provides a custom service path, return the current service + // path and do not attempt to add mtls subdomain: + if (customServicePath || !servicePath) + return servicePath; + if (typeof process !== 'undefined' && + ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.GOOGLE_API_USE_MTLS_ENDPOINT) === 'never') { + // It was explicitly asked that mtls endpoint not be used: + return servicePath; + } + else if ((typeof process !== 'undefined' && + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.GOOGLE_API_USE_MTLS_ENDPOINT) === 'always') || + hasCertificate) { + // Either auto-detect or explicit setting of endpoint: + return servicePath.replace('googleapis.com', 'mtls.googleapis.com'); + } + return servicePath; + } + /** + * Creates a 'bytelength' function for a given proto message class. + * + * See {@link BundleDescriptor} about the meaning of the return value. + * + * @param {function} message - a constructor function that is generated by + * protobuf.js. Assumes 'encoder' field in the message. + * @return {function(Object):number} - a function to compute the byte length + * for an object. + */ + static createByteLengthFunction(message) { + return gax.createByteLengthFunction(message); + } +} +exports.GrpcClient = GrpcClient; +GrpcClient.protoCache = new Map(); +class GoogleProtoFilesRoot extends protobuf.Root { + constructor(...args) { + super(...args); + } + // Causes the loading of an included proto to check if it is a common + // proto. If it is a common proto, use the bundled proto. + resolvePath(originPath, includePath) { + originPath = path.normalize(originPath); + includePath = path.normalize(includePath); + // Fully qualified paths don't need to be resolved. 
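// A minimal sketch of how the mTLS logic above (createStub -> _detectClientCertificate
// -> _mtlsServicePath) is driven from the caller's side, per https://google.aip.dev/auth/4114.
// The values below are examples, not defaults read from this bundle.
process.env.GOOGLE_API_USE_CLIENT_CERTIFICATE = 'true'; // opt in to client certificate lookup
process.env.GOOGLE_API_USE_MTLS_ENDPOINT = 'always';    // or 'never'; anything else means auto-detect
// With a cert/key available (passed as options.cert/options.key, or produced by the
// cert_provider_command listed in ~/.secureConnect/context_aware_metadata.json), the
// service path is rewritten from *.googleapis.com to *.mtls.googleapis.com.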
+ if (path.isAbsolute(includePath)) { + if (!fs.existsSync(includePath)) { + throw new Error('The include `' + includePath + '` was not found.'); + } + return includePath; + } + if (COMMON_PROTO_FILES.indexOf(includePath) > -1) { + return path.join(googleProtoFilesDir, includePath); + } + return GoogleProtoFilesRoot._findIncludePath(originPath, includePath); + } + static _findIncludePath(originPath, includePath) { + originPath = path.normalize(originPath); + includePath = path.normalize(includePath); + let current = originPath; + let found = fs.existsSync(path.join(current, includePath)); + while (!found && current.length > 0) { + current = current.substring(0, current.lastIndexOf(path.sep)); + found = fs.existsSync(path.join(current, includePath)); + } + if (!found) { + throw new Error('The include `' + includePath + '` was not found.'); + } + return path.join(current, includePath); + } +} +exports.GoogleProtoFilesRoot = GoogleProtoFilesRoot; +//# sourceMappingURL=grpc.js.map /***/ }), -/***/ 28564: -/***/ ((__unused_webpack_module, exports) => { +/***/ 28118: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.IamClient = void 0; +const createApiCall_1 = __nccwpck_require__(79712); +const routingHeader = __nccwpck_require__(8827); +const gapicConfig = __nccwpck_require__(74155); +const fallback = __nccwpck_require__(90418); +let version = (__nccwpck_require__(13385).version); +const jsonProtos = __nccwpck_require__(53897); +/** + * Google Cloud IAM Client. + * This is manually written for providing methods [setIamPolicy, getIamPolicy, testIamPerssion] to the generated client. + */ +class IamClient { + constructor(gaxGrpc, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + this._terminated = false; + this.descriptors = { page: {}, stream: {}, longrunning: {} }; + this.innerApiCalls = {}; + this.gaxGrpc = gaxGrpc; + // Ensure that options include the service address and port. + const opts = Object.assign({ + servicePath: options.servicePath, + port: options.port, + clientConfig: options.clientConfig, + apiEndpoint: options.apiEndpoint, + fallback: options.fallback, + }, options); + version = opts.fallback ? fallback.version : version; + opts.scopes = this.constructor.scopes; + // Save options to use in initialize() method. + this._opts = opts; + // Save the auth object to the client, for use by other methods. + this.auth = gaxGrpc.auth; + // Determine the client header string. 
+ const clientHeader = [`gax/${version}`, `gapic/${version}`]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } + else { + clientHeader.push(`gl-web/${version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this.gaxGrpc.loadProtoJSON(jsonProtos); + // Put together the default options sent with requests. + this._defaults = gaxGrpc.constructSettings('google.iam.v1.IAMPolicy', gapicConfig, opts.clientConfig || {}, { 'x-goog-api-client': clientHeader.join(' ') }); + this.innerApiCalls = {}; + } + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.iamPolicyStub) { + return this.iamPolicyStub; + } + // Put together the "service stub" for + // google.iam.v1.IAMPolicy. + this.iamPolicyStub = this.gaxGrpc.createStub(this._opts.fallback + ? this._protos.lookupService('google.iam.v1.IAMPolicy') + : this._protos.google.iam.v1.IAMPolicy, this._opts); + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const iamPolicyStubMethods = [ + 'getIamPolicy', + 'setIamPolicy', + 'testIamPermissions', + ]; + for (const methodName of iamPolicyStubMethods) { + const innerCallPromise = this.iamPolicyStub.then(stub => (...args) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, (err) => () => { + throw err; + }); + this.innerApiCalls[methodName] = (0, createApiCall_1.createApiCall)(innerCallPromise, this._defaults[methodName], this.descriptors.page[methodName]); + } + return this.iamPolicyStub; + } + /** + * The DNS address for this API service. + */ + static get servicePath() { + return 'cloudkms.googleapis.com'; + } + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + */ + static get apiEndpoint() { + return 'cloudkms.googleapis.com'; + } + /** + * The port for this API service. + */ + static get port() { + return 443; + } + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. 
+ */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloudkms', + ]; + } + getProjectId(callback) { + if (this.auth && 'getProjectId' in this.auth) { + return this.auth.getProjectId(callback); + } + if (callback) { + callback(new Error('Cannot determine project ID.')); + } + else { + return Promise.reject('Cannot determine project ID.'); + } + } + getIamPolicy(request, optionsOrCallback, callback) { + let options; + if (optionsOrCallback instanceof Function && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + routingHeader.fromParams({ + resource: request.resource, + }); + this.initialize(); + return this.innerApiCalls.getIamPolicy(request, options, callback); + } + setIamPolicy(request, optionsOrCallback, callback) { + let options; + if (optionsOrCallback instanceof Function && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + routingHeader.fromParams({ + resource: request.resource, + }); + this.initialize(); + return this.innerApiCalls.setIamPolicy(request, options, callback); + } + testIamPermissions(request, optionsOrCallback, callback) { + let options; + if (optionsOrCallback instanceof Function && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + routingHeader.fromParams({ + resource: request.resource, + }); + this.initialize(); + return this.innerApiCalls.testIamPermissions(request, options, callback); + } + /** + * Terminate the GRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + */ + close() { + this.initialize(); + if (!this._terminated) { + return this.iamPolicyStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} +exports.IamClient = IamClient; +//# sourceMappingURL=iamService.js.map /***/ }), -/***/ 61285: -/***/ ((__unused_webpack_module, exports) => { +/***/ 12263: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
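// A minimal usage sketch for the IamClient defined above. `iamClient` and the KMS
// resource name are assumptions for the example; generated clients normally construct
// IamClient internally and re-export these three methods.
async function iamExample(iamClient) {
    const resource = 'projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key';
    const [policy] = await iamClient.getIamPolicy({ resource });
    await iamClient.setIamPolicy({ resource, policy });
    const [response] = await iamClient.testIamPermissions({
        resource,
        permissions: ['cloudkms.cryptoKeys.get'],
    });
    return response.permissions; // the subset of requested permissions that are granted
}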
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.serializer = exports.warn = exports.ChannelCredentials = exports.makeUUID = exports.fallback = exports.protobufMinimal = exports.protobuf = exports.version = exports.createByteLengthFunction = exports.LocationsClient = exports.IamClient = exports.OperationsClient = exports.LocationProtos = exports.IamProtos = exports.operationsProtos = exports.routingHeader = exports.StreamType = exports.Status = exports.PathTemplate = exports.operation = exports.Operation = exports.GrpcClient = exports.GoogleProtoFilesRoot = exports.ClientStub = exports.GoogleError = exports.createMaxRetriesBackoffSettings = exports.createDefaultBackoffSettings = exports.createBackoffSettings = exports.createBundleOptions = exports.createRetryOptions = exports.RetryOptions = exports.constructSettings = exports.CallSettings = exports.StreamDescriptor = exports.PageDescriptor = exports.LongrunningDescriptor = exports.BundleDescriptor = exports.createApiCall = exports.OngoingCall = exports.grpc = exports.GoogleAuth = void 0; +exports.lro = lro; +const grpc = __nccwpck_require__(7025); +exports.grpc = grpc; +const grpc_1 = __nccwpck_require__(68976); +const IamProtos = __nccwpck_require__(83555); +exports.IamProtos = IamProtos; +const LocationProtos = __nccwpck_require__(22560); +exports.LocationProtos = LocationProtos; +const operationsProtos = __nccwpck_require__(59081); +exports.operationsProtos = operationsProtos; +const operationsClient = __nccwpck_require__(82450); +const routingHeader = __nccwpck_require__(8827); +exports.routingHeader = routingHeader; +var google_auth_library_1 = __nccwpck_require__(20810); +Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return google_auth_library_1.GoogleAuth; } })); +var call_1 = __nccwpck_require__(53047); +Object.defineProperty(exports, "OngoingCall", ({ enumerable: true, get: function () { return call_1.OngoingCall; } })); +var createApiCall_1 = __nccwpck_require__(79712); +Object.defineProperty(exports, "createApiCall", ({ enumerable: true, get: function () { return createApiCall_1.createApiCall; } })); +var descriptor_1 = __nccwpck_require__(13627); +Object.defineProperty(exports, "BundleDescriptor", ({ enumerable: true, get: function () { return descriptor_1.BundleDescriptor; } })); +Object.defineProperty(exports, "LongrunningDescriptor", ({ enumerable: true, get: function () { return descriptor_1.LongrunningDescriptor; } })); +Object.defineProperty(exports, "PageDescriptor", ({ enumerable: true, get: function () { return descriptor_1.PageDescriptor; } })); +Object.defineProperty(exports, "StreamDescriptor", ({ enumerable: true, get: function () { return descriptor_1.StreamDescriptor; } })); +var gax_1 = __nccwpck_require__(80978); +Object.defineProperty(exports, "CallSettings", ({ enumerable: true, get: function () { return gax_1.CallSettings; } })); +Object.defineProperty(exports, "constructSettings", ({ enumerable: true, get: function () { return gax_1.constructSettings; } })); +Object.defineProperty(exports, "RetryOptions", ({ enumerable: true, get: function () { return gax_1.RetryOptions; } })); +Object.defineProperty(exports, "createRetryOptions", ({ enumerable: true, get: function () { return gax_1.createRetryOptions; } })); +Object.defineProperty(exports, "createBundleOptions", ({ enumerable: true, get: function () { return gax_1.createBundleOptions; } })); +Object.defineProperty(exports, "createBackoffSettings", ({ enumerable: true, get: function () { 
return gax_1.createBackoffSettings; } })); +Object.defineProperty(exports, "createDefaultBackoffSettings", ({ enumerable: true, get: function () { return gax_1.createDefaultBackoffSettings; } })); +Object.defineProperty(exports, "createMaxRetriesBackoffSettings", ({ enumerable: true, get: function () { return gax_1.createMaxRetriesBackoffSettings; } })); +var googleError_1 = __nccwpck_require__(6634); +Object.defineProperty(exports, "GoogleError", ({ enumerable: true, get: function () { return googleError_1.GoogleError; } })); +var grpc_2 = __nccwpck_require__(68976); +Object.defineProperty(exports, "ClientStub", ({ enumerable: true, get: function () { return grpc_2.ClientStub; } })); +Object.defineProperty(exports, "GoogleProtoFilesRoot", ({ enumerable: true, get: function () { return grpc_2.GoogleProtoFilesRoot; } })); +Object.defineProperty(exports, "GrpcClient", ({ enumerable: true, get: function () { return grpc_2.GrpcClient; } })); +var longrunning_1 = __nccwpck_require__(83481); +Object.defineProperty(exports, "Operation", ({ enumerable: true, get: function () { return longrunning_1.Operation; } })); +Object.defineProperty(exports, "operation", ({ enumerable: true, get: function () { return longrunning_1.operation; } })); +var pathTemplate_1 = __nccwpck_require__(20513); +Object.defineProperty(exports, "PathTemplate", ({ enumerable: true, get: function () { return pathTemplate_1.PathTemplate; } })); +var status_1 = __nccwpck_require__(53501); +Object.defineProperty(exports, "Status", ({ enumerable: true, get: function () { return status_1.Status; } })); +var streaming_1 = __nccwpck_require__(67389); +Object.defineProperty(exports, "StreamType", ({ enumerable: true, get: function () { return streaming_1.StreamType; } })); +function lro(options) { + options = Object.assign({ scopes: lro.ALL_SCOPES }, options); + const gaxGrpc = new grpc_1.GrpcClient(options); + return new operationsClient.OperationsClientBuilder(gaxGrpc); +} +lro.SERVICE_ADDRESS = operationsClient.SERVICE_ADDRESS; +lro.ALL_SCOPES = operationsClient.ALL_SCOPES; +var operationsClient_1 = __nccwpck_require__(82450); +Object.defineProperty(exports, "OperationsClient", ({ enumerable: true, get: function () { return operationsClient_1.OperationsClient; } })); +var iamService_1 = __nccwpck_require__(28118); +Object.defineProperty(exports, "IamClient", ({ enumerable: true, get: function () { return iamService_1.IamClient; } })); +var locationService_1 = __nccwpck_require__(58591); +Object.defineProperty(exports, "LocationsClient", ({ enumerable: true, get: function () { return locationService_1.LocationsClient; } })); +exports.createByteLengthFunction = grpc_1.GrpcClient.createByteLengthFunction; +exports.version = __nccwpck_require__(13385).version; +const protobuf = __nccwpck_require__(85881); +exports.protobuf = protobuf; +exports.protobufMinimal = __nccwpck_require__(96916); +const fallback = __nccwpck_require__(90418); +exports.fallback = fallback; +var util_1 = __nccwpck_require__(26969); +Object.defineProperty(exports, "makeUUID", ({ enumerable: true, get: function () { return util_1.makeUUID; } })); +var grpc_js_1 = __nccwpck_require__(7025); +Object.defineProperty(exports, "ChannelCredentials", ({ enumerable: true, get: function () { return grpc_js_1.ChannelCredentials; } })); +var warnings_1 = __nccwpck_require__(16328); +Object.defineProperty(exports, "warn", ({ enumerable: true, get: function () { return warnings_1.warn; } })); +const serializer = __nccwpck_require__(2027); +exports.serializer = serializer; +//# 
sourceMappingURL=index.js.map /***/ }), -/***/ 50364: -/***/ ((__unused_webpack_module, exports) => { +/***/ 58591: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.RequestHandlerProtocol = void 0; -var RequestHandlerProtocol; -(function (RequestHandlerProtocol) { - RequestHandlerProtocol["HTTP_0_9"] = "http/0.9"; - RequestHandlerProtocol["HTTP_1_0"] = "http/1.0"; - RequestHandlerProtocol["TDS_8_0"] = "tds/8.0"; -})(RequestHandlerProtocol = exports.RequestHandlerProtocol || (exports.RequestHandlerProtocol = {})); - +exports.LocationsClient = void 0; +/* global window */ +const gax = __nccwpck_require__(80978); +const warnings_1 = __nccwpck_require__(16328); +const createApiCall_1 = __nccwpck_require__(79712); +const routingHeader = __nccwpck_require__(8827); +const pageDescriptor_1 = __nccwpck_require__(71172); +const jsonProtos = __nccwpck_require__(31663); +/** + * This file defines retry strategy and timeouts for all API methods in this library. + */ +const gapicConfig = __nccwpck_require__(88812); +const version = (__nccwpck_require__(13385).version); +/** + * Google Cloud Locations Client. + * This is manually written for providing methods [listLocations, getLocations] to the generated client. + */ +class LocationsClient { + /** + * Construct an instance of LocationsClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. 
+ * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP fallback mode. + * In fallback mode, a special browser-compatible transport implementation is used + * instead of gRPC transport. In browser context (if the `window` object is defined) + * the fallback mode is enabled automatically; set `options.fallback` to `false` + * if you need to override this behavior. + */ + constructor(gaxGrpc, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + opts) { + var _a, _b; + this._terminated = false; + this.descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + // Ensure that options include all the required fields. + this.gaxGrpc = gaxGrpc; + const staticMembers = this.constructor; + const servicePath = (opts === null || opts === void 0 ? void 0 : opts.servicePath) || (opts === null || opts === void 0 ? void 0 : opts.apiEndpoint) || staticMembers.servicePath; + this._providedCustomServicePath = !!((opts === null || opts === void 0 ? void 0 : opts.servicePath) || (opts === null || opts === void 0 ? void 0 : opts.apiEndpoint)); + const port = (opts === null || opts === void 0 ? void 0 : opts.port) || staticMembers.port; + const clientConfig = (_a = opts === null || opts === void 0 ? void 0 : opts.clientConfig) !== null && _a !== void 0 ? _a : {}; + const fallback = (_b = opts === null || opts === void 0 ? void 0 : opts.fallback) !== null && _b !== void 0 ? _b : (typeof window !== 'undefined' && typeof (window === null || window === void 0 ? void 0 : window.fetch) === 'function'); + opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts); + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + // Save options to use in initialize() method. + this._opts = opts; + // Save the auth object to the client, for use by other methods. + this.auth = gaxGrpc.auth; + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + // Determine the client header string. + const clientHeader = [`gax/${version}`, `gapic/${version}`]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } + else { + clientHeader.push(`gl-web/${version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${gaxGrpc.grpcVersion}`); + } + else if (opts.fallback === 'rest') { + clientHeader.push(`rest/${gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = gaxGrpc.loadProtoJSON(jsonProtos); + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listLocations: new pageDescriptor_1.PageDescriptor('pageToken', 'nextPageToken', 'locations'), + }; + // Put together the default options sent with requests. 
+ this._defaults = gaxGrpc.constructSettings('google.cloud.location.Locations', gapicConfig, opts.clientConfig || {}, { 'x-goog-api-client': clientHeader.join(' ') }); + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + // Add a warn function to the client constructor so it can be easily tested. + this.warn = warnings_1.warn; + } + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.locationsStub) { + return this.locationsStub; + } + // Put together the "service stub" for + // google.cloud.location.Locations. + this.locationsStub = this.gaxGrpc.createStub(this._opts.fallback + ? this._protos.lookupService('google.cloud.location.Locations') + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + this._protos.google.cloud.location.Locations, this._opts, this._providedCustomServicePath); + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const locationsStubMethods = ['listLocations', 'getLocation']; + for (const methodName of locationsStubMethods) { + const callPromise = this.locationsStub.then(stub => (...args) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, (err) => () => { + throw err; + }); + const descriptor = this.descriptors.page[methodName] || undefined; + const apiCall = (0, createApiCall_1.createApiCall)(callPromise, this._defaults[methodName], descriptor); + this.innerApiCalls[methodName] = apiCall; + } + return this.locationsStub; + } + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'cloud.googleapis.com'; + } + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'cloud.googleapis.com'; + } + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return ['https://www.googleapis.com/auth/cloud-platform']; + } + getProjectId(callback) { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + /** + * Gets information about a location. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Resource name for the location. + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Location]{@link google.cloud.location.Location}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#regular-methods) + * for more details and examples. + * @example + * const [response] = await client.getLocation(request); + */ + getLocation(request, optionsOrCallback, callback) { + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + routingHeader.fromParams({ + name: request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getLocation(request, options, callback); + } + /** + * Lists information about the supported locations for this service. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * The resource that owns the locations collection, if applicable. + * @param {string} request.filter + * The standard list filter. + * @param {number} request.pageSize + * The standard list page size. + * @param {string} request.pageToken + * The standard list page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [Location]{@link google.cloud.location.Location}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listLocationsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listLocations(request, optionsOrCallback, callback) { + request = request || {}; + let options; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + routingHeader.fromParams({ + name: request.name || '', + }); + this.initialize(); + return this.innerApiCalls.listLocations(request, options, callback); + } + /** + * Equivalent to `listLocations`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * The resource that owns the locations collection, if applicable. + * @param {string} request.filter + * The standard list filter. + * @param {number} request.pageSize + * The standard list page size. 
+ * @param {string} request.pageToken + * The standard list page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [Location]{@link google.cloud.location.Location}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#auto-pagination) + * for more details and examples. + * @example + * const iterable = client.listLocationsAsync(request); + * for await (const response of iterable) { + * // process response + * } + */ + listLocationsAsync(request, options) { + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + routingHeader.fromParams({ + name: request.name || '', + }); + options = options || {}; + const callSettings = new gax.CallSettings(options); + this.initialize(); + return this.descriptors.page.listLocations.asyncIterate(this.innerApiCalls['listLocations'], request, callSettings); + } + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close() { + this.initialize(); + if (!this._terminated) { + return this.locationsStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} +exports.LocationsClient = LocationsClient; +//# sourceMappingURL=locationService.js.map /***/ }), -/***/ 69304: -/***/ ((__unused_webpack_module, exports) => { +/***/ 74729: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.LongrunningApiCaller = void 0; +const call_1 = __nccwpck_require__(53047); +const gax_1 = __nccwpck_require__(80978); +const longrunning_1 = __nccwpck_require__(83481); +class LongrunningApiCaller { + /** + * Creates an API caller that performs polling on a long running operation. + * + * @private + * @constructor + * @param {LongRunningDescriptor} longrunningDescriptor - Holds the + * decoders used for unpacking responses and the operationsClient + * used for polling the operation. 
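// A minimal usage sketch for the LocationsClient above, following its own JSDoc examples.
// `client` is assumed to be a generated client that exposes these mixed-in methods, and
// the project/location names are placeholders.
async function locationsExample(client) {
    const [location] = await client.getLocation({
        name: 'projects/my-project/locations/us-central1',
    });
    console.log(location.locationId);
    // The async iterator pages through results on demand instead of fetching every page:
    for await (const loc of client.listLocationsAsync({ name: 'projects/my-project' })) {
        console.log(loc.name);
    }
}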
+ */ + constructor(longrunningDescriptor) { + this.longrunningDescriptor = longrunningDescriptor; + } + init(callback) { + if (callback) { + return new call_1.OngoingCall(callback); + } + return new call_1.OngoingCallPromise(); + } + wrap(func) { + return func; + } + call(apiCall, argument, settings, canceller) { + canceller.call((argument, callback) => { + return this._wrapOperation(apiCall, settings, argument, callback); + }, argument); + } + _wrapOperation(apiCall, settings, argument, callback) { + let backoffSettings = settings.longrunning; + if (!backoffSettings) { + backoffSettings = (0, gax_1.createDefaultBackoffSettings)(); + } + const longrunningDescriptor = this.longrunningDescriptor; + return apiCall(argument, (err, rawResponse) => { + if (err) { + callback(err, null, null, rawResponse); + return; + } + const operation = new longrunning_1.Operation(rawResponse, longrunningDescriptor, backoffSettings, settings); + callback(null, operation, rawResponse); + }); + } + fail(canceller, err) { + canceller.callback(err); + } + result(canceller) { + return canceller.promise; + } +} +exports.LongrunningApiCaller = LongrunningApiCaller; +//# sourceMappingURL=longRunningApiCaller.js.map /***/ }), -/***/ 46098: -/***/ ((__unused_webpack_module, exports) => { +/***/ 8096: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.LongRunningDescriptor = void 0; +const longRunningApiCaller_1 = __nccwpck_require__(74729); +/** + * A descriptor for long-running operations. + */ +class LongRunningDescriptor { + constructor(operationsClient, responseDecoder, metadataDecoder) { + this.operationsClient = operationsClient; + this.responseDecoder = responseDecoder; + this.metadataDecoder = metadataDecoder; + } + getApiCaller() { + return new longRunningApiCaller_1.LongrunningApiCaller(this); + } +} +exports.LongRunningDescriptor = LongRunningDescriptor; +//# sourceMappingURL=longRunningDescriptor.js.map /***/ }), -/***/ 10375: -/***/ ((__unused_webpack_module, exports) => { +/***/ 83481: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
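// A minimal sketch of how a LongRunningDescriptor is typically assembled, assuming
// `operationsClient` is a google.longrunning OperationsClient and that ResponseMessage /
// MetadataMessage are protobufjs message types for the method's LRO payloads (all three
// names are assumptions for this example).
const lroDescriptor = new LongRunningDescriptor(
    operationsClient,
    bytes => ResponseMessage.decode(bytes),  // responseDecoder
    bytes => MetadataMessage.decode(bytes)   // metadataDecoder
);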
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.Operation = void 0; +exports.operation = operation; +const events_1 = __nccwpck_require__(82361); +const status_1 = __nccwpck_require__(53501); +const googleError_1 = __nccwpck_require__(6634); +const operationProtos = __nccwpck_require__(59081); +class Operation extends events_1.EventEmitter { + /** + * Wrapper for a google.longrunnung.Operation. + * + * @constructor + * + * @param {google.longrunning.Operation} grpcOp - The operation to be wrapped. + * @param {LongRunningDescriptor} longrunningDescriptor - This defines the + * operations service client and unpacking mechanisms for the operation. + * @param {BackoffSettings} backoffSettings - The backoff settings used in + * in polling the operation. + * @param {CallOptions} callOptions - CallOptions used in making get operation + * requests. + */ + constructor(grpcOp, longrunningDescriptor, backoffSettings, callOptions) { + super(); + this.completeListeners = 0; + this.hasActiveListeners = false; + this.latestResponse = grpcOp; + this.name = this.latestResponse.name; + this.done = this.latestResponse.done; + this.error = this.latestResponse.error; + this.longrunningDescriptor = longrunningDescriptor; + this.result = null; + this.metadata = null; + this.backoffSettings = backoffSettings; + this._unpackResponse(grpcOp); + this._listenForEvents(); + this._callOptions = callOptions; + } + /** + * Begin listening for events on the operation. This method keeps track of how + * many "complete" listeners are registered and removed, making sure polling + * is handled automatically. + * + * As long as there is one active "complete" listener, the connection is open. + * When there are no more listeners, the polling stops. + * + * @private + */ + _listenForEvents() { + this.on('newListener', event => { + if (event === 'complete') { + this.completeListeners++; + if (!this.hasActiveListeners) { + this.hasActiveListeners = true; + this.startPolling_(); + } + } + }); + this.on('removeListener', event => { + if (event === 'complete' && --this.completeListeners === 0) { + this.hasActiveListeners = false; + } + }); + } + /** + * Cancels current polling api call and cancels the operation. + * + * @return {Promise} the promise of the OperationsClient#cancelOperation api + * request. 
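// A minimal event-based usage sketch for the Operation wrapper above; `operation` is
// assumed to come from a long-running GAPIC call. Adding the 'complete' listener is
// what starts the polling described in _listenForEvents()/startPolling_().
operation
    .on('progress', (metadata, rawResponse) => console.log('still running', metadata))
    .on('error', err => console.error('operation failed', err))
    .on('complete', (result, metadata, rawResponse) => console.log('finished', result));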
+ */ + cancel() { + if (this.currentCallPromise_) { + this.currentCallPromise_.cancel(); + } + const operationsClient = this.longrunningDescriptor.operationsClient; + const cancelRequest = new operationProtos.google.longrunning.CancelOperationRequest(); + cancelRequest.name = this.latestResponse.name; + return operationsClient.cancelOperation(cancelRequest); + } + getOperation(callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const operationsClient = this.longrunningDescriptor.operationsClient; + function promisifyResponse() { + if (!callback) { + return new Promise((resolve, reject) => { + if (self.latestResponse.error) { + const error = new googleError_1.GoogleError(self.latestResponse.error.message); + error.code = self.latestResponse.error.code; + reject(error); + } + else { + resolve([self.result, self.metadata, self.latestResponse]); + } + }); + } + return; + } + if (this.latestResponse.done) { + this._unpackResponse(this.latestResponse, callback); + return promisifyResponse(); + } + const request = new operationProtos.google.longrunning.GetOperationRequest(); + request.name = this.latestResponse.name; + this.currentCallPromise_ = operationsClient.getOperationInternal(request, this._callOptions); + const noCallbackPromise = this.currentCallPromise_.then(responses => { + self.latestResponse = responses[0]; + self._unpackResponse(responses[0], callback); + return promisifyResponse(); + }, (err) => { + if (callback) { + callback(err); + return; + } + return Promise.reject(err); + }); + if (!callback) { + return noCallbackPromise; + } + } + _unpackResponse(op, callback) { + const responseDecoder = this.longrunningDescriptor.responseDecoder; + const metadataDecoder = this.longrunningDescriptor.metadataDecoder; + let response; + let metadata; + if (op.done) { + if (op.result === 'error') { + const error = new googleError_1.GoogleError(op.error.message); + error.code = op.error.code; + this.error = error; + if (callback) { + callback(error); + } + return; + } + if (responseDecoder && op.response) { + this.response = op.response; + response = responseDecoder(op.response.value); + this.result = response; + this.done = true; + } + } + if (metadataDecoder && op.metadata) { + metadata = metadataDecoder(op.metadata.value); + this.metadata = metadata; + } + if (callback) { + callback(null, response, metadata, op); + } + } + /** + * Poll `getOperation` to check the operation's status. This runs a loop to + * ping using the backoff strategy specified at initialization. + * + * Note: This method is automatically called once a "complete" event handler + * is registered on the operation. 
+ * + * @private + */ + startPolling_() { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + let now = new Date(); + const delayMult = this.backoffSettings.retryDelayMultiplier; + const maxDelay = this.backoffSettings.maxRetryDelayMillis; + let delay = this.backoffSettings.initialRetryDelayMillis; + let deadline = Infinity; + if (this.backoffSettings.totalTimeoutMillis) { + deadline = now.getTime() + this.backoffSettings.totalTimeoutMillis; + } + let previousMetadataBytes; + if (this.latestResponse.metadata) { + previousMetadataBytes = this.latestResponse.metadata.value; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + function emit(event, ...args) { + self.emit(event, ...args); + } + // Helper function to replace nodejs buffer's equals() + function arrayEquals(a, b) { + if (a.byteLength !== b.byteLength) { + return false; + } + for (let i = 0; i < a.byteLength; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; + } + function retry() { + if (!self.hasActiveListeners) { + return; + } + if (now.getTime() >= deadline) { + const error = new googleError_1.GoogleError('Total timeout exceeded before any response was received'); + error.code = status_1.Status.DEADLINE_EXCEEDED; + setImmediate(emit, 'error', error); + return; + } + self.getOperation((err, result, metadata, rawResponse) => { + if (err) { + setImmediate(emit, 'error', err); + return; + } + if (!result) { + if (rawResponse.metadata && + (!previousMetadataBytes || + (rawResponse && + !arrayEquals(rawResponse.metadata.value, previousMetadataBytes)))) { + setImmediate(emit, 'progress', metadata, rawResponse); + previousMetadataBytes = rawResponse.metadata.value; + } + // special case: some APIs fail to set either result or error + // but set done = true (e.g. speech with silent file). + // Some APIs just use this for the normal completion + // (e.g. nodejs-contact-center-insights), so let's just return + // an empty response in this case. + if (rawResponse.done) { + setImmediate(emit, 'complete', {}, metadata, rawResponse); + return; + } + setTimeout(() => { + now = new Date(); + delay = Math.min(delay * delayMult, maxDelay); + retry(); + }, delay); + return; + } + setImmediate(emit, 'complete', result, metadata, rawResponse); + }); + } + retry(); + } + /** + * Wraps the `complete` and `error` events in a Promise. + * + * @return {promise} - Promise that resolves on operation completion and rejects + * on operation error. + */ + promise() { + return new Promise((resolve, reject) => { + this.on('error', reject).on('complete', (result, metadata, rawResponse) => { + resolve([result, metadata, rawResponse]); + }); + }); + } +} +exports.Operation = Operation; +/** + * Method used to create Operation objects. + * + * @constructor + * + * @param {google.longrunning.Operation} op - The operation to be wrapped. + * @param {LongRunningDescriptor} longrunningDescriptor - This defines the + * operations service client and unpacking mechanisms for the operation. + * @param {BackoffSettings} backoffSettings - The backoff settings used in + * in polling the operation. + * @param {CallOptions=} callOptions - CallOptions used in making get operation + * requests. 
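// The promise-based counterpart to the event flow above, plus cancellation; `operation`
// is again assumed to come from a long-running GAPIC call.
async function waitForOperation(operation) {
    // promise() resolves with [result, metadata, rawResponse] on 'complete' and rejects
    // on 'error' (for example DEADLINE_EXCEEDED once totalTimeoutMillis elapses).
    const [result] = await operation.promise();
    return result;
}
// To stop local polling and ask the service to cancel the server-side operation:
//   await operation.cancel();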
+ */ +function operation(op, longrunningDescriptor, backoffSettings, callOptions) { + return new Operation(op, longrunningDescriptor, backoffSettings, callOptions); +} +//# sourceMappingURL=longrunning.js.map /***/ }), -/***/ 66894: -/***/ ((__unused_webpack_module, exports) => { +/***/ 56513: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.NormalApiCaller = void 0; +const call_1 = __nccwpck_require__(53047); +/** + * Creates an API caller for regular unary methods. + */ +class NormalApiCaller { + init(callback) { + if (callback) { + return new call_1.OngoingCall(callback); + } + return new call_1.OngoingCallPromise(); + } + wrap(func) { + return func; + } + call(apiCall, argument, settings, canceller) { + canceller.call(apiCall, argument); + } + fail(canceller, err) { + canceller.callback(err); + } + result(canceller) { + return canceller.promise; + } +} +exports.NormalApiCaller = NormalApiCaller; +//# sourceMappingURL=normalApiCaller.js.map /***/ }), -/***/ 57887: -/***/ ((__unused_webpack_module, exports) => { +/***/ 69742: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.retryable = retryable; +const status_1 = __nccwpck_require__(53501); +const googleError_1 = __nccwpck_require__(6634); +const timeout_1 = __nccwpck_require__(4531); +/** + * Creates a function equivalent to func, but that retries on certain + * exceptions. + * + * @private + * + * @param {GRPCCall} func - A function. + * @param {RetryOptions} retry - Configures the exceptions upon which the + * function eshould retry, and the parameters to the exponential backoff retry + * algorithm. + * @param {GRPCCallOtherArgs} otherArgs - the additional arguments to be passed to func. + * @return {SimpleCallbackFunction} A function that will retry. 
+ */ +function retryable(func, retry, otherArgs, apiName) { + const delayMult = retry.backoffSettings.retryDelayMultiplier; + const maxDelay = retry.backoffSettings.maxRetryDelayMillis; + const timeoutMult = retry.backoffSettings.rpcTimeoutMultiplier; + const maxTimeout = retry.backoffSettings.maxRpcTimeoutMillis; + let delay = retry.backoffSettings.initialRetryDelayMillis; + let timeout = retry.backoffSettings.initialRpcTimeoutMillis; + /** + * Equivalent to ``func``, but retries upon transient failure. + * + * Retrying is done through an exponential backoff algorithm configured + * by the options in ``retry``. + * @param {RequestType} argument The request object. + * @param {APICallback} callback The callback. + * @return {GRPCCall} + */ + return (argument, callback) => { + let canceller; + let timeoutId; + let now = new Date(); + let deadline; + if (retry.backoffSettings.totalTimeoutMillis) { + deadline = now.getTime() + retry.backoffSettings.totalTimeoutMillis; + } + let retries = 0; + const maxRetries = retry.backoffSettings.maxRetries; + // TODO: define A/B testing values for retry behaviors. + /** Repeat the API call as long as necessary. */ + function repeat(err) { + timeoutId = null; + if (deadline && now.getTime() >= deadline) { + const error = new googleError_1.GoogleError(`Total timeout of API ${apiName} exceeded ${retry.backoffSettings.totalTimeoutMillis} milliseconds ${err ? `retrying error ${err} ` : ''} before any response was received.`); + error.code = status_1.Status.DEADLINE_EXCEEDED; + callback(error); + return; + } + if (retries && retries >= maxRetries) { + const error = new googleError_1.GoogleError('Exceeded maximum number of retries ' + + (err ? `retrying error ${err} ` : '') + + 'before any response was received'); + error.code = status_1.Status.DEADLINE_EXCEEDED; + callback(error); + return; + } + retries++; + let lastError = err; + const toCall = (0, timeout_1.addTimeoutArg)(func, timeout, otherArgs); + canceller = toCall(argument, (err, response, next, rawResponse) => { + // Save only the error before deadline exceeded + if (err && err.code !== 4) { + lastError = err; + } + if (!err) { + callback(null, response, next, rawResponse); + return; + } + canceller = null; + if (retry.retryCodes.length > 0 && + retry.retryCodes.indexOf(err.code) < 0) { + err.note = + 'Exception occurred in retry method that was ' + + 'not classified as transient'; + callback(err); + } + else { + const toSleep = Math.random() * delay; + timeoutId = setTimeout(() => { + now = new Date(); + delay = Math.min(delay * delayMult, maxDelay); + const timeoutCal = timeout && timeoutMult ? timeout * timeoutMult : 0; + const rpcTimeout = maxTimeout ? maxTimeout : 0; + const newDeadline = deadline ? 
deadline - now.getTime() : 0; + timeout = Math.min(timeoutCal, rpcTimeout, newDeadline); + repeat(lastError); + }, toSleep); + } + }); + if (canceller instanceof Promise) { + canceller.catch(err => { + callback(new googleError_1.GoogleError(err)); + }); + } + } + if (maxRetries && deadline) { + const error = new googleError_1.GoogleError('Cannot set both totalTimeoutMillis and maxRetries ' + + 'in backoffSettings.'); + error.code = status_1.Status.INVALID_ARGUMENT; + callback(error); + } + else { + repeat(); + } + return { + cancel() { + if (timeoutId) { + clearTimeout(timeoutId); + } + if (canceller) { + canceller.cancel(); + } + else { + const error = new googleError_1.GoogleError('cancelled'); + error.code = status_1.Status.CANCELLED; + callback(error); + } + }, + }; + }; +} +//# sourceMappingURL=retries.js.map /***/ }), -/***/ 66255: +/***/ 4531: /***/ ((__unused_webpack_module, exports) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); - +exports.addTimeoutArg = addTimeoutArg; +/** + * Updates func so that it gets called with the timeout as its final arg. + * + * This converts a function, func, into another function with updated deadline. + * + * @private + * + * @param {GRPCCall} func - a function to be updated. + * @param {number} timeout - to be added to the original function as it final + * positional arg. + * @param {Object} otherArgs - the additional arguments to be passed to func. + * @param {Object=} abTests - the A/B testing key/value pairs. + * @return {function(Object, APICallback)} + * the function with other arguments and the timeout. + */ +function addTimeoutArg(func, timeout, otherArgs, abTests) { + // TODO: this assumes the other arguments consist of metadata and options, + // which is specific to gRPC calls. Remove the hidden dependency on gRPC. + return (argument, callback) => { + const now = new Date(); + const options = otherArgs.options || {}; + options.deadline = new Date(now.getTime() + timeout); + const metadata = otherArgs.metadataBuilder + ? otherArgs.metadataBuilder(abTests, otherArgs.headers || {}) + : null; + return func(argument, metadata, options, callback); + }; +} +//# sourceMappingURL=timeout.js.map /***/ }), -/***/ 14681: +/***/ 82450: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
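// A minimal sketch of how the retry loop above is configured from CallOptions, assuming
// google-gax is required directly; the numbers are example values, not defaults read
// from this bundle.
const gax = require('google-gax');
const backoff = gax.createBackoffSettings(
    100,     // initialRetryDelayMillis
    1.3,     // retryDelayMultiplier
    60000,   // maxRetryDelayMillis
    20000,   // initialRpcTimeoutMillis
    1.5,     // rpcTimeoutMultiplier
    600000,  // maxRpcTimeoutMillis
    600000   // totalTimeoutMillis
);
const retry = gax.createRetryOptions(
    [gax.Status.UNAVAILABLE, gax.Status.DEADLINE_EXCEEDED],
    backoff
);
// A generated client method would then accept this as: client.someMethod(request, { retry });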
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseUrl = void 0; -const querystring_parser_1 = __nccwpck_require__(4769); -const parseUrl = (url) => { - if (typeof url === "string") { - return (0, exports.parseUrl)(new URL(url)); +exports.OperationsClientBuilder = exports.OperationsClient = exports.ALL_SCOPES = exports.SERVICE_ADDRESS = void 0; +const createApiCall_1 = __nccwpck_require__(79712); +const descriptor_1 = __nccwpck_require__(13627); +const gax = __nccwpck_require__(80978); +const configData = __nccwpck_require__(24114); +const operationProtoJson = __nccwpck_require__(78472); +const transcoding_1 = __nccwpck_require__(86707); +exports.SERVICE_ADDRESS = 'longrunning.googleapis.com'; +const version = (__nccwpck_require__(13385).version); +const DEFAULT_SERVICE_PORT = 443; +const CODE_GEN_NAME_VERSION = 'gapic/0.7.1'; +/** + * The scopes needed to make gRPC calls to all of the methods defined in + * this service. + */ +exports.ALL_SCOPES = []; +/** + * Manages long-running operations with an API service. + * + * When an API method normally takes long time to complete, it can be designed + * to return {@link Operation} to the client, and the client can use this + * interface to receive the real response asynchronously by polling the + * operation resource, or pass the operation resource to another API (such as + * Google Cloud Pub/Sub API) to receive the response. Any API service that + * returns long-running operations should implement the `Operations` interface + * so developers can have a consistent client experience. + * + * This will be created through a builder function which can be obtained by the + * module. See the following example of how to initialize the module and how to + * access to the builder. + * @see {@link operationsClient} + * + * @class + */ +class OperationsClient { + constructor(gaxGrpc, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + operationsProtos, options) { + const opts = Object.assign({ + servicePath: exports.SERVICE_ADDRESS, + port: DEFAULT_SERVICE_PORT, + clientConfig: {}, + }, options); + const googleApiClient = ['gl-node/' + process.versions.node]; + if (opts.libName && opts.libVersion) { + googleApiClient.push(opts.libName + '/' + opts.libVersion); + } + googleApiClient.push(CODE_GEN_NAME_VERSION, 'gax/' + version); + if (opts.fallback) { + googleApiClient.push('gl-web/' + version); + } + else { + googleApiClient.push('grpc/' + gaxGrpc.grpcVersion); + } + const defaults = gaxGrpc.constructSettings('google.longrunning.Operations', configData, opts.clientConfig || {}, { 'x-goog-api-client': googleApiClient.join(' ') }); + this.auth = gaxGrpc.auth; + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + this.descriptor = { + listOperations: new descriptor_1.PageDescriptor('pageToken', 'nextPageToken', 'operations'), + }; + // Put together the "service stub" for + // google.longrunning.Operations. + this.operationsStub = gaxGrpc.createStub(opts.fallback + ? 
operationsProtos.lookupService('google.longrunning.Operations') + : operationsProtos.google.longrunning.Operations, opts); + const operationsStubMethods = [ + 'getOperation', + 'listOperations', + 'cancelOperation', + 'deleteOperation', + ]; + for (const methodName of operationsStubMethods) { + const innerCallPromise = this.operationsStub.then(stub => (...args) => { + const func = stub[methodName]; + return func.apply(stub, args); + }, err => () => { + throw err; + }); + this.innerApiCalls[methodName] = (0, createApiCall_1.createApiCall)(innerCallPromise, defaults[methodName], this.descriptor[methodName]); + } } - const { hostname, pathname, port, protocol, search } = url; - let query; - if (search) { - query = (0, querystring_parser_1.parseQueryString)(search); + /** Closes this operations client. */ + close() { + this.operationsStub.then(stub => stub.close()); } - return { - hostname, - port: port ? parseInt(port) : undefined, - protocol, - path: pathname, - query, - }; -}; -exports.parseUrl = parseUrl; - + getProjectId(callback) { + if (this.auth && 'getProjectId' in this.auth) { + return this.auth.getProjectId(callback); + } + if (callback) { + callback(new Error('Cannot determine project ID.')); + } + else { + return Promise.reject('Cannot determine project ID.'); + } + } + // Service calls + getOperationInternal(request, options, callback) { + request = request || {}; + options = options || {}; + return this.innerApiCalls.getOperation(request, options, callback); + } + /** + * Gets the latest state of a long-running operation. Clients can use this + * method to poll the operation result at intervals as recommended by the API + * service. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation resource. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See [gax.CallOptions]{@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the + * details. + * @param {function(?Error, ?Object)=} callback + * The function which will be called with the result of the API call. + * + * The second parameter to the callback is an object representing + * [google.longrunning.Operation]{@link + * external:"google.longrunning.Operation"}. + * @return {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * [google.longrunning.Operation]{@link + * external:"google.longrunning.Operation"}. The promise has a method named + * "cancel" which cancels the ongoing API call. + * + * @example + * + * const client = longrunning.operationsClient(); + * const name = ''; + * const [response] = await client.getOperation({name}); + * // doThingsWith(response) + */ + getOperation(request, optionsOrCallback, callback) { + let options; + if (optionsOrCallback instanceof Function && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + request = request || {}; + options = options || {}; + return this.innerApiCalls.getOperation(request, options, callback); + } + /** + * Lists operations that match the specified filter in the request. If the + * server doesn't support this method, it returns `UNIMPLEMENTED`. + * + * NOTE: the `name` binding below allows API services to override the binding + * to use different resource name schemes. 
+ * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation collection. + * @param {string} request.filter - The standard list filter. + * @param {number=} request.pageSize + * The maximum number of resources contained in the underlying API + * response. If page streaming is performed per-resource, this + * parameter does not affect the return value. If page streaming is + * performed per-page, this determines the maximum number of + * resources in a page. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See [gax.CallOptions]{@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the + * details. + * @param {function(?Error, ?Array, ?Object, ?Object)=} callback + * The function which will be called with the result of the API call. + * + * The second parameter to the callback is Array of + * [google.longrunning.Operation]{@link + * external:"google.longrunning.Operation"}. + * + * When autoPaginate: false is specified through options, it contains the + * result in a single response. If the response indicates the next page + * exists, the third parameter is set to be used for the next request object. + * The fourth parameter keeps the raw response object of an object + * representing [google.longrunning.ListOperationsResponse]{@link + * external:"google.longrunning.ListOperationsResponse"}. + * @return {Promise} - The promise which resolves to an array. + * The first element of the array is Array of + * [google.longrunning.Operation]{@link + * external:"google.longrunning.Operation"}. + * + * When autoPaginate: false is specified through options, the array has + * three elements. The first element is Array of + * [google.longrunning.Operation]{@link + * external:"google.longrunning.Operation"} in a single response. The second + * element is the next request object if the response indicates the next page + * exists, or null. The third element is an object representing + * [google.longrunning.ListOperationsResponse]{@link + * external:"google.longrunning.ListOperationsResponse"}. + * + * The promise has a method named "cancel" which cancels the ongoing API + * call. + * + * @example + * + * const client = longrunning.operationsClient(); + * const request = { + * name: '', + * filter: '' + * }; + * // Iterate over all elements. + * const [resources] = await client.listOperations(request); + * for (const resource of resources) { + * console.log(resources); + * } + * + * // Or obtain the paged response. + * const options = {autoPaginate: false}; + * let nextRequest = request; + * while(nextRequest) { + * const response = await client.listOperations(nextRequest, options); + * const resources = response[0]; + * nextRequest = response[1]; + * const rawResponse = response[2]; + * for (const resource of resources) { + * // doThingsWith(resource); + * } + * }; + */ + listOperations(request, optionsOrCallback, callback) { + let options; + if (optionsOrCallback instanceof Function && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + request = request || {}; + options = options || {}; + return this.innerApiCalls.listOperations(request, options, callback); + } + /** + * Equivalent to {@link listOperations}, but returns a NodeJS Stream object. 
+ * + * This fetches the paged responses for {@link listOperations} continuously + * and invokes the callback registered for 'data' event for each element in + * the responses. + * + * The returned object has 'end' method when no more elements are required. + * + * autoPaginate option will be ignored. + * + * @see {@link https://nodejs.org/api/stream.html} + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation collection. + * @param {string} request.filter - The standard list filter. + * @param {number=} request.pageSize - + * The maximum number of resources contained in the underlying API + * response. If page streaming is performed per-resource, this + * parameter does not affect the return value. If page streaming is + * performed per-page, this determines the maximum number of + * resources in a page. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See [gax.CallOptions]{@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the + * details. + * @return {Stream} - An object stream which emits an object representing [google.longrunning.Operation]{@link external:"google.longrunning.Operation"} on 'data' event. + * + * @example + * + * const client = longrunning.operationsClient(); + * const request = { + * name: '', + * filter: '' + * }; + * client.listOperationsStream(request) + * .on('data', element => { + * // doThingsWith(element) + * }) + * .on('error', err => { + * console.error(err); + * }); + */ + listOperationsStream(request, options) { + const callSettings = new gax.CallSettings(options); + return this.descriptor.listOperations.createStream(this.innerApiCalls.listOperations, request, callSettings); + } + /** + * Equivalent to {@link listOperations}, but returns an iterable object. + * + * for-await-of syntax is used with the iterable to recursively get response element on-demand. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation collection. + * @param {string} request.filter - The standard list filter. + * @param {number=} request.pageSize - + * The maximum number of resources contained in the underlying API + * response. If page streaming is performed per-resource, this + * parameter does not affect the return value. If page streaming is + * performed per-page, this determines the maximum number of + * resources in a page. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See [gax.CallOptions]{@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the + * details. + * @returns {Object} + * An iterable Object that conforms to @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols. + */ + listOperationsAsync(request, options) { + request = request || {}; + options = options || {}; + const callSettings = new gax.CallSettings(options); + return this.descriptor.listOperations.asyncIterate(this.innerApiCalls.listOperations, request, callSettings); + } + /** + * Starts asynchronous cancellation on a long-running operation. The server + * makes a best effort to cancel the operation, but success is not + * guaranteed. If the server doesn't support this method, it returns + * `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use + * {@link Operations.GetOperation} or + * other methods to check whether the cancellation succeeded or whether the + * operation completed despite cancellation. On successful cancellation, + * the operation is not deleted; instead, it becomes an operation with + * an {@link Operation.error} value with a {@link google.rpc.Status.code} of + * 1, corresponding to `Code.CANCELLED`. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation resource to be cancelled. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See [gax.CallOptions]{@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the + * details. + * @param {function(?Error)=} callback + * The function which will be called with the result of the API call. + * @return {Promise} - The promise which resolves when API call finishes. + * The promise has a method named "cancel" which cancels the ongoing API + * call. + * + * @example + * + * const client = longrunning.operationsClient(); + * await client.cancelOperation({name: ''}); + */ + cancelOperation(request, optionsOrCallback, callback) { + let options; + if (optionsOrCallback instanceof Function && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + request = request || {}; + options = options || {}; + return this.innerApiCalls.cancelOperation(request, options, callback); + } + /** + * Deletes a long-running operation. This method indicates that the client is + * no longer interested in the operation result. It does not cancel the + * operation. If the server doesn't support this method, it returns + * `google.rpc.Code.UNIMPLEMENTED`. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation resource to be deleted. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See [gax.CallOptions]{@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the + * details. + * @param {function(?Error)=} callback + * The function which will be called with the result of the API call. + * @return {Promise} - The promise which resolves when API call finishes. + * The promise has a method named "cancel" which cancels the ongoing API + * call. + * + * @example + * + * const client = longrunning.operationsClient(); + * await client.deleteOperation({name: ''}); + */ + deleteOperation(request, optionsOrCallback, callback) { + let options; + if (optionsOrCallback instanceof Function && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + request = request || {}; + options = options || {}; + return this.innerApiCalls.deleteOperation(request, options, callback); + } +} +exports.OperationsClient = OperationsClient; +class OperationsClientBuilder { + /** + * Builds a new Operations Client + * @param gaxGrpc {GrpcClient} + */ + constructor(gaxGrpc, protoJson) { + if (protoJson && gaxGrpc.httpRules) { + // overwrite the http rules if provide in service yaml. + (0, transcoding_1.overrideHttpRules)(gaxGrpc.httpRules, protoJson); + } + const operationsProtos = protoJson !== null && protoJson !== void 0 ? 
protoJson : gaxGrpc.loadProtoJSON(operationProtoJson); + /** + * Build a new instance of {@link OperationsClient}. + * + * @param {Object=} opts - The optional parameters. + * @param {String=} opts.servicePath - Domain name of the API remote host. + * @param {number=} opts.port - The port on which to connect to the remote host. + * @param {grpc.ClientCredentials=} opts.sslCreds - A ClientCredentials for use with an SSL-enabled channel. + * @param {Object=} opts.clientConfig - The customized config to build the call settings. See {@link gax.constructSettings} for the format. + */ + this.operationsClient = opts => { + if (gaxGrpc.fallback) { + opts.fallback = gaxGrpc.fallback; + } + return new OperationsClient(gaxGrpc, operationsProtos, opts); + }; + Object.assign(this.operationsClient, OperationsClient); + } +} +exports.OperationsClientBuilder = OperationsClientBuilder; +//# sourceMappingURL=operationsClient.js.map /***/ }), -/***/ 30305: +/***/ 71172: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromBase64 = void 0; -const util_buffer_from_1 = __nccwpck_require__(31381); -const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; -const fromBase64 = (input) => { - if ((input.length * 3) % 4 !== 0) { - throw new TypeError(`Incorrect padding on base64 string.`); +exports.PageDescriptor = void 0; +const stream_1 = __nccwpck_require__(12781); +const normalApiCaller_1 = __nccwpck_require__(56513); +const pagedApiCaller_1 = __nccwpck_require__(37441); +const maxAttemptsEmptyResponse = 10; +/** + * A descriptor for methods that support pagination. + */ +class PageDescriptor { + constructor(requestPageTokenField, responsePageTokenField, resourceField) { + this.requestPageTokenField = requestPageTokenField; + this.responsePageTokenField = responsePageTokenField; + this.resourceField = resourceField; } - if (!BASE64_REGEX.exec(input)) { - throw new TypeError(`Invalid base64 string.`); + /** + * Creates a new object Stream which emits the resource on 'data' event. + */ + createStream(apiCall, request, options) { + const stream = new stream_1.PassThrough({ objectMode: true }); + options = Object.assign({}, options, { autoPaginate: false }); + const maxResults = 'maxResults' in options ? options.maxResults : -1; + let pushCount = 0; + let started = false; + function callback(err, resources, next, apiResp) { + if (err) { + stream.emit('error', err); + return; + } + // emit full api response with every page. 
+ stream.emit('response', apiResp); + for (let i = 0; i < resources.length; ++i) { + // TODO: rewrite without accessing stream internals + if (stream + ._readableState.ended) { + return; + } + if (resources[i] === null) { + continue; + } + stream.push(resources[i]); + pushCount++; + if (pushCount === maxResults) { + stream.end(); + } + } + // TODO: rewrite without accessing stream internals + if (stream._readableState + .ended) { + return; + } + if (!next) { + stream.end(); + return; + } + // When pageToken is specified in the original options, it will overwrite + // the page token field in the next request. Therefore it must be cleared. + if ('pageToken' in options) { + delete options.pageToken; + } + if (stream.isPaused()) { + request = next; + started = false; + } + else { + setImmediate(apiCall, next, options, callback); + } + } + stream.on('resume', () => { + if (!started) { + started = true; + apiCall(request, options, callback); + } + }); + return stream; } - const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); - return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); -}; -exports.fromBase64 = fromBase64; - + /** + * Create an async iterable which can be recursively called for data on-demand. + */ + asyncIterate(apiCall, request, options) { + options = Object.assign({}, options, { autoPaginate: false }); + const iterable = this.createIterator(apiCall, request, options); + return iterable; + } + createIterator(apiCall, request, options) { + const asyncIterable = { + [Symbol.asyncIterator]() { + let nextPageRequest = request; + const cache = []; + return { + async next() { + if (cache.length > 0) { + return Promise.resolve({ + done: false, + value: cache.shift(), + }); + } + let attempts = 0; + while (cache.length === 0 && nextPageRequest) { + let result; + [result, nextPageRequest] = (await apiCall(nextPageRequest, options)); + // For pagination response with protobuf map type, use tuple as representation. + if (result && !Array.isArray(result)) { + for (const [key, value] of Object.entries(result)) { + cache.push([key, value]); + } + } + else { + cache.push(...result); + } + if (cache.length === 0) { + ++attempts; + if (attempts > maxAttemptsEmptyResponse) { + break; + } + } + } + if (cache.length === 0) { + return Promise.resolve({ done: true, value: undefined }); + } + return Promise.resolve({ done: false, value: cache.shift() }); + }, + }; + }, + }; + return asyncIterable; + } + getApiCaller(settings) { + if (!settings.autoPaginate) { + return new normalApiCaller_1.NormalApiCaller(); + } + return new pagedApiCaller_1.PagedApiCaller(this); + } +} +exports.PageDescriptor = PageDescriptor; +//# sourceMappingURL=pageDescriptor.js.map /***/ }), -/***/ 75600: +/***/ 37441: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(30305), exports); -tslib_1.__exportStar(__nccwpck_require__(74730), exports); - +exports.PagedApiCaller = void 0; +const call_1 = __nccwpck_require__(53047); +const googleError_1 = __nccwpck_require__(6634); +const resourceCollector_1 = __nccwpck_require__(50389); +class PagedApiCaller { + /** + * Creates an API caller that returns a stream to performs page-streaming. + * + * @private + * @constructor + * @param {PageDescriptor} pageDescriptor - indicates the structure + * of page streaming to be performed. + */ + constructor(pageDescriptor) { + this.pageDescriptor = pageDescriptor; + } + /** + * This function translates between regular gRPC calls (that accepts a request and returns a response, + * and does not know anything about pages and page tokens) and the users' callback (that expects + * to see resources from one page, a request to get the next page, and the raw response from the server). + * + * It generates a function that can be passed as a callback function to a gRPC call, will understand + * pagination-specific fields in the response, and call the users' callback after having those fields + * parsed. + * + * @param request Request object. It needs to be passed to all subsequent next page requests + * (the main content of the request object stays unchanged, only the next page token changes) + * @param callback The user's callback that expects the page content, next page request, and raw response. + */ + generateParseResponseCallback(request, callback) { + const resourceFieldName = this.pageDescriptor.resourceField; + const responsePageTokenFieldName = this.pageDescriptor.responsePageTokenField; + const requestPageTokenFieldName = this.pageDescriptor.requestPageTokenField; + return (err, response) => { + if (err) { + callback(err); + return; + } + if (!request) { + callback(new googleError_1.GoogleError('Undefined request in pagination method callback.')); + return; + } + if (!response) { + callback(new googleError_1.GoogleError('Undefined response in pagination method callback.')); + return; + } + const resources = response[resourceFieldName] || []; + const pageToken = response[responsePageTokenFieldName]; + let nextPageRequest = null; + if (pageToken) { + nextPageRequest = Object.assign({}, request); + nextPageRequest[requestPageTokenFieldName] = pageToken; + } + callback(err, resources, nextPageRequest, response); + }; + } + /** + * Adds a special ability to understand pagination-specific fields to the existing gRPC call. + * The original gRPC call just calls callback(err, result). + * The wrapped one will call callback(err, resources, nextPageRequest, rawResponse) instead. + * + * @param func gRPC call (normally, a service stub call). The gRPC call is expected to accept four parameters: + * request, metadata, call options, and callback. + */ + wrap(func) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + return function wrappedCall(argument, metadata, options, callback) { + return func(argument, metadata, options, self.generateParseResponseCallback(argument, callback)); + }; + } + /** + * Makes it possible to use both callback-based and promise-based calls. + * Returns an OngoingCall or OngoingCallPromise object. + * Regardless of which one is returned, it always has a `.callback` to call. + * + * @param settings Call settings. 
Can only be used to replace Promise with another promise implementation. + * @param [callback] Callback to be called, if any. + */ + init(callback) { + if (callback) { + return new call_1.OngoingCall(callback); + } + return new call_1.OngoingCallPromise(); + } + /** + * Implements auto-pagination logic. + * + * @param apiCall A function that performs gRPC request and calls its callback with a response or an error. + * It's supposed to be a gRPC service stub function wrapped into several layers of wrappers that make it + * accept just two parameters: (request, callback). + * @param request A request object that came from the user. + * @param settings Call settings. We are interested in `maxResults` and `autoPaginate` (they are optional). + * @param ongoingCall An instance of OngoingCall or OngoingCallPromise that can be used for call cancellation, + * and is used to return results to the user. + */ + call(apiCall, request, settings, ongoingCall) { + request = Object.assign({}, request); + if (!settings.autoPaginate) { + // they don't want auto-pagination this time - okay, just call once + ongoingCall.call(apiCall, request); + return; + } + const maxResults = settings.maxResults || -1; + const resourceCollector = new resourceCollector_1.ResourceCollector(apiCall, maxResults); + resourceCollector.processAllPages(request).then(resources => ongoingCall.callback(null, resources), err => ongoingCall.callback(err)); + } + fail(ongoingCall, err) { + ongoingCall.callback(err); + } + result(ongoingCall) { + return ongoingCall.promise; + } +} +exports.PagedApiCaller = PagedApiCaller; +//# sourceMappingURL=pagedApiCaller.js.map /***/ }), -/***/ 74730: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 50389: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toBase64 = void 0; -const util_buffer_from_1 = __nccwpck_require__(31381); -const toBase64 = (input) => (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); -exports.toBase64 = toBase64; - +exports.ResourceCollector = void 0; +/** + * ResourceCollector class implements asynchronous logic of calling the API call that supports pagination, + * page by page, collecting all resources (up to `maxResults`) in the array. 
+ * + * Usage: + * const resourceCollector = new ResourceCollector(apiCall, maxResults); // -1 for unlimited + * resourceCollector.processAllPages(request).then(resources => ...); + */ +class ResourceCollector { + constructor(apiCall, maxResults = -1) { + this.apiCall = apiCall; + this.resources = []; + this.maxResults = maxResults; + } + callback(err, resources, nextPageRequest) { + if (err) { + // Something went wrong with this request - failing everything + this.rejectCallback(err); + return; + } + // Process one page + for (const resource of resources) { + this.resources.push(resource); + if (this.resources.length === this.maxResults) { + nextPageRequest = null; + break; + } + } + // All done? + if (!nextPageRequest) { + this.resolveCallback(this.resources); + return; + } + // Schedule the next call + const callback = (...args) => this.callback(...args); + setImmediate(this.apiCall, nextPageRequest, callback); + } + processAllPages(firstRequest) { + return new Promise((resolve, reject) => { + this.resolveCallback = resolve; + this.rejectCallback = reject; + // Schedule the first call + const callback = (...args) => this.callback(...args); + setImmediate(this.apiCall, firstRequest, callback); + }); + } +} +exports.ResourceCollector = ResourceCollector; +//# sourceMappingURL=resourceCollector.js.map /***/ }), -/***/ 54880: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 20513: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.calculateBodyLength = void 0; -const fs_1 = __nccwpck_require__(57147); -const calculateBodyLength = (body) => { - if (!body) { - return 0; - } - if (typeof body === "string") { - return Buffer.from(body).length; +exports.PathTemplate = void 0; +class PathTemplate { + /** + * @param {String} data the of the template + * + * @constructor + */ + constructor(data) { + this.bindings = {}; + this.data = data; + this.segments = this.parsePathTemplate(data); + this.size = this.segments.length; } - else if (typeof body.byteLength === "number") { - return body.byteLength; + /** + * Matches a fully-qualified path template string. + * + * @param {String} path a fully-qualified path template string + * @return {Object} contains const names matched to binding values + * @throws {TypeError} if path can't be matched to this template + */ + match(path) { + let pathSegments = path.split('/'); + const bindings = {}; + if (pathSegments.length !== this.segments.length) { + // if the path contains a wildcard, then the length may differ by 1. 
+ if (!this.data.includes('**')) { + throw new TypeError(`This path ${path} does not match path template ${this.data}, the number of parameters is not same.`); + } + else if (pathSegments.length !== this.segments.length + 1) { + throw new TypeError(`This path ${path} does not match path template ${this.data}, the number of parameters is not same with one wildcard.`); + } + } + for (let index = 0; index < this.segments.length && pathSegments.length > 0; index++) { + if (this.segments[index] !== pathSegments[0]) { + if (!this.segments[index].includes('*')) { + throw new TypeError(`segment does not match, ${this.segments[index]} and ${pathSegments[index]}.`); + } + else { + let segment = this.segments[index]; + const matches = segment.match(/\{[$0-9a-zA-Z_]+=.*?\}/g); + if (!matches) { + throw new Error(`Error processing path template segment ${segment}`); + } + const variables = matches.map(str => str.replace(/^\{/, '').replace(/=.*/, '')); + if (segment.includes('**')) { + bindings[variables[0]] = pathSegments[0] + '/' + pathSegments[1]; + pathSegments = pathSegments.slice(2); + } + else { + // atomic resource + if (variables.length === 1) { + bindings[variables[0]] = pathSegments[0]; + } + else { + // non-slash resource + // segment: {blurb_id=*}.{legacy_user=*} to match pathSegments: ['bar.user2'] + // split the match pathSegments[0] -> value: ['bar', 'user2'] + // compare the length of two arrays, and compare array items + const value = pathSegments[0].split(/[-_.~]/); + if (value.length !== variables.length) { + throw new Error(`segment ${segment} does not match ${pathSegments[0]}`); + } + for (const v of variables) { + bindings[v] = value[0]; + segment = segment.replace(`{${v}=*}`, `${value[0]}`); + value.shift(); + } + // segment: {blurb_id=*}.{legacy_user=*} matching pathSegments: ['bar~user2'] should fail + if (segment !== pathSegments[0]) { + throw new TypeError(`non slash resource pattern ${this.segments[index]} and ${pathSegments[0]} should have same separator`); + } + } + pathSegments.shift(); + } + } + } + else { + pathSegments.shift(); + } + } + return bindings; } - else if (typeof body.size === "number") { - return body.size; + /** + * Renders a path template using the provided bindings. + * + * @param {Object} bindings a mapping of const names to binding strings + * @return {String} a rendered representation of the path template + * @throws {TypeError} if a key is missing, or if a sub-template cannot be + * parsed + */ + render(bindings) { + if (Object.keys(bindings).length !== Object.keys(this.bindings).length) { + throw new TypeError(`The number of variables ${Object.keys(bindings).length} does not match the number of needed variables ${Object.keys(this.bindings).length}`); + } + let path = this.inspect(); + for (const key of Object.keys(bindings)) { + const b = bindings[key].toString(); + if (!this.bindings[key]) { + throw new TypeError(`render fails for not matching ${bindings[key]}`); + } + const variable = this.bindings[key]; + if (variable === '*') { + if (!b.match(/[^/{}]+/)) { + throw new TypeError(`render fails for not matching ${b}`); + } + path = path.replace(`{${key}=*}`, `${b}`); + } + else if (variable === '**') { + if (!b.match(/[^{}]+/)) { + throw new TypeError(`render fails for not matching ${b}`); + } + path = path.replace(`{${key}=**}`, `${b}`); + } + } + return path; } - else if (typeof body.start === "number" && typeof body.end === "number") { - return body.end + 1 - body.start; + /** + * Renders the path template. 
+ * + * @return {string} contains const names matched to binding values + */ + inspect() { + return this.segments.join('/'); } - else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { - return (0, fs_1.lstatSync)(body.path).size; + /** + * Parse the path template. + * + * @return {string[]} return segments of the input path. + * For example: 'buckets/{hello}'' will give back ['buckets', {hello=*}] + */ + parsePathTemplate(data) { + const pathSegments = splitPathTemplate(data); + let index = 0; + let wildCardCount = 0; + const segments = []; + let matches; + pathSegments.forEach(segment => { + // * or ** -> segments.push('{$0=*}'); + // -> bindings['$0'] = '*' + if (segment === '*' || segment === '**') { + this.bindings[`$${index}`] = segment; + segments.push(`{$${index}=${segment}}`); + index = index + 1; + if (segment === '**') { + ++wildCardCount; + } + } + else if ((matches = segment.match(/\{[0-9a-zA-Z-.~_]+(?:=.*?)?\}/g))) { + for (const subsegment of matches) { + const pairMatch = subsegment.match(/^\{([0-9a-zA-Z-.~_]+)(?:=(.*?))?\}$/); + if (!pairMatch) { + throw new Error(`Cannot process path template segment ${subsegment}`); + } + const key = pairMatch[1]; + let value = pairMatch[2]; + if (!value) { + value = '*'; + segment = segment.replace(key, key + '=*'); + this.bindings[key] = value; + } + else if (value === '*') { + this.bindings[key] = value; + } + else if (value === '**') { + ++wildCardCount; + this.bindings[key] = value; + } + } + segments.push(segment); + } + else if (segment.match(/[0-9a-zA-Z-.~_]+/)) { + segments.push(segment); + } + }); + if (wildCardCount > 1) { + throw new TypeError('Can not have more than one wildcard.'); + } + return segments; } - else if (typeof body.fd === "number") { - return (0, fs_1.fstatSync)(body.fd).size; +} +exports.PathTemplate = PathTemplate; +/** + * Split the path template by `/`. + * It can not be simply splitted by `/` because there might be `/` in the segments. + * For example: 'a/b/{a=hello/world}' we do not want to break the brackets pair + * so above path will be splitted as ['a', 'b', '{a=hello/world}'] + */ +function splitPathTemplate(data) { + let left = 0; + let right = 0; + let bracketCount = 0; + const segments = []; + while (right >= left && right < data.length) { + if (data.charAt(right) === '{') { + bracketCount = bracketCount + 1; + } + else if (data.charAt(right) === '}') { + bracketCount = bracketCount - 1; + } + else if (data.charAt(right) === '/') { + if (right === data.length - 1) { + throw new TypeError('Invalid path, it can not be ended by /'); + } + if (bracketCount === 0) { + // complete bracket, to avoid the case a/b/**/*/{a=hello/world} + segments.push(data.substring(left, right)); + left = right + 1; + } + } + if (right === data.length - 1) { + if (bracketCount !== 0) { + throw new TypeError('Brackets are invalid.'); + } + segments.push(data.substring(left)); + } + right = right + 1; } - throw new Error(`Body Length computation failed for ${body}`); -}; -exports.calculateBodyLength = calculateBodyLength; - + return segments; +} +//# sourceMappingURL=pathTemplate.js.map /***/ }), -/***/ 68075: +/***/ 8827: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(54880), exports); - +exports.fromParams = fromParams; +const querystring = __nccwpck_require__(63477); +/** + * Helpers for constructing routing headers. + * + * These headers are used by Google infrastructure to determine how to route + * requests, especially for services that are regional. + * + * Generally, these headers are specified as gRPC metadata. + */ +/** + * Constructs the routing header from the given params + * + * @param {Object} params - the request header parameters. + * @return {string} the routing header value. + */ +function fromParams(params) { + return querystring.stringify(params); +} +//# sourceMappingURL=routingHeader.js.map /***/ }), -/***/ 31381: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 53501: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromString = exports.fromArrayBuffer = void 0; -const is_array_buffer_1 = __nccwpck_require__(10780); -const buffer_1 = __nccwpck_require__(14300); -const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { - if (!(0, is_array_buffer_1.isArrayBuffer)(input)) { - throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); +exports.HttpCodeToRpcCodeMap = exports.Status = void 0; +exports.rpcCodeFromHttpStatusCode = rpcCodeFromHttpStatusCode; +// The following is a copy of the Status enum defined in @grpc/grpc-js, +// src/constants.ts. We need to use some of these statuses here and there, +// but we don't want to include the whole @grpc/grpc-js into the browser +// bundle just to have this small enum. 
+var Status; +(function (Status) { + Status[Status["OK"] = 0] = "OK"; + Status[Status["CANCELLED"] = 1] = "CANCELLED"; + Status[Status["UNKNOWN"] = 2] = "UNKNOWN"; + Status[Status["INVALID_ARGUMENT"] = 3] = "INVALID_ARGUMENT"; + Status[Status["DEADLINE_EXCEEDED"] = 4] = "DEADLINE_EXCEEDED"; + Status[Status["NOT_FOUND"] = 5] = "NOT_FOUND"; + Status[Status["ALREADY_EXISTS"] = 6] = "ALREADY_EXISTS"; + Status[Status["PERMISSION_DENIED"] = 7] = "PERMISSION_DENIED"; + Status[Status["RESOURCE_EXHAUSTED"] = 8] = "RESOURCE_EXHAUSTED"; + Status[Status["FAILED_PRECONDITION"] = 9] = "FAILED_PRECONDITION"; + Status[Status["ABORTED"] = 10] = "ABORTED"; + Status[Status["OUT_OF_RANGE"] = 11] = "OUT_OF_RANGE"; + Status[Status["UNIMPLEMENTED"] = 12] = "UNIMPLEMENTED"; + Status[Status["INTERNAL"] = 13] = "INTERNAL"; + Status[Status["UNAVAILABLE"] = 14] = "UNAVAILABLE"; + Status[Status["DATA_LOSS"] = 15] = "DATA_LOSS"; + Status[Status["UNAUTHENTICATED"] = 16] = "UNAUTHENTICATED"; +})(Status || (exports.Status = Status = {})); +exports.HttpCodeToRpcCodeMap = new Map([ + [400, Status.INVALID_ARGUMENT], + [401, Status.UNAUTHENTICATED], + [403, Status.PERMISSION_DENIED], + [404, Status.NOT_FOUND], + [409, Status.ABORTED], + [416, Status.OUT_OF_RANGE], + [429, Status.RESOURCE_EXHAUSTED], + [499, Status.CANCELLED], + [501, Status.UNIMPLEMENTED], + [503, Status.UNAVAILABLE], + [504, Status.DEADLINE_EXCEEDED], +]); +// Maps HTTP status codes to gRPC status codes above. +function rpcCodeFromHttpStatusCode(httpStatusCode) { + if (exports.HttpCodeToRpcCodeMap.has(httpStatusCode)) { + return exports.HttpCodeToRpcCodeMap.get(httpStatusCode); } - return buffer_1.Buffer.from(input, offset, length); -}; -exports.fromArrayBuffer = fromArrayBuffer; -const fromString = (input, encoding) => { - if (typeof input !== "string") { - throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + // All 2xx + if (httpStatusCode >= 200 && httpStatusCode < 300) { + return Status.OK; } - return encoding ? buffer_1.Buffer.from(input, encoding) : buffer_1.Buffer.from(input); -}; -exports.fromString = fromString; - + // All other 4xx + if (httpStatusCode >= 400 && httpStatusCode < 500) { + return Status.FAILED_PRECONDITION; + } + // All other 5xx + if (httpStatusCode >= 500 && httpStatusCode < 600) { + return Status.INTERNAL; + } + // Everything else + return Status.UNKNOWN; +} +//# sourceMappingURL=status.js.map /***/ }), -/***/ 42491: -/***/ ((__unused_webpack_module, exports) => { +/***/ 25735: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.booleanSelector = exports.SelectorType = void 0; -var SelectorType; -(function (SelectorType) { - SelectorType["ENV"] = "env"; - SelectorType["CONFIG"] = "shared config entry"; -})(SelectorType = exports.SelectorType || (exports.SelectorType = {})); -const booleanSelector = (obj, key, type) => { - if (!(key in obj)) - return undefined; - if (obj[key] === "true") - return true; - if (obj[key] === "false") - return false; - throw new Error(`Cannot load ${type} "${key}". Expected "true" or "false", got ${obj[key]}.`); -}; -exports.booleanSelector = booleanSelector; - +exports.StreamArrayParser = void 0; +const abort_controller_1 = __nccwpck_require__(61659); +const stream_1 = __nccwpck_require__(12781); +const fallbackRest_1 = __nccwpck_require__(21088); +const featureDetection_1 = __nccwpck_require__(11154); +class StreamArrayParser extends stream_1.Transform { + /** + * StreamArrayParser processes array of valid JSON objects in random chunks + * through readable stream, and produces a stream of plain Javascript objects + * where it converted from the corresponding protobuf message instance. + * + * The default JSON parser decodes the input stream under the + * following rules: + * 1. The stream represents a valid JSON array (must start with a "[" and + * close with the corresponding "]"). Each element of this array is assumed to + * be either an array or an object, and will be decoded as a JS object and + * delivered. + * 2. All JSON elements in the buffer will be decoded and delivered in a + * stream. + * + * @private + * @constructor + * @param {protobuf.Method} rpc - the protobuf method produce array of JSON. + * @param {Object} options - the options pass to Transform Stream. See more + * details + * https://nodejs.org/api/stream.html#stream_new_stream_transform_options. + */ + constructor(rpc, options) { + super(Object.assign({}, options, { readableObjectMode: true })); + this._done = false; + this._prevBlock = Buffer.from(''); + this._isInString = false; + this._isSkipped = false; + this._level = 0; + this.rpc = rpc; + this.cancelController = (0, featureDetection_1.hasAbortController)() + ? new AbortController() + : new abort_controller_1.AbortController(); + this.cancelSignal = this.cancelController.signal; + this.cancelRequested = false; + } + _transform(chunk, _, callback) { + let objectStart = 0; + let curIndex = 0; + if (this._level === 0 && curIndex === 0) { + if (String.fromCharCode(chunk[0]) !== '[') { + this.emit('error', new Error(`Internal Error: API service stream data must start with a '[' and close with the corresponding ']', but it start with ${String.fromCharCode(chunk[0])}`)); + } + curIndex++; + this._level++; + } + while (curIndex < chunk.length) { + const curValue = String.fromCharCode(chunk[curIndex]); + if (!this._isSkipped) { + switch (curValue) { + case '{': + // Check if it's in string, we ignore the curly brace in string. + // Otherwise the object level++. 
+ if (!this._isInString) { + this._level++; + } + if (!this._isInString && this._level === 2) { + objectStart = curIndex; + } + break; + case '"': + // Flip the string status + this._isInString = !this._isInString; + break; + case '}': + // check if it's in string + // if true, do nothing + // if false and level = 0, push data + if (!this._isInString) { + this._level--; + } + if (!this._isInString && this._level === 1) { + // find a object + const objBuff = Buffer.concat([ + this._prevBlock, + chunk.slice(objectStart, curIndex + 1), + ]); + try { + // HTTP response.ok is true. + const msgObj = (0, fallbackRest_1.decodeResponse)(this.rpc, true, objBuff); + this.push(msgObj); + } + catch (err) { + this.emit('error', err); + } + objectStart = curIndex + 1; + this._prevBlock = Buffer.from(''); + } + break; + case ']': + if (!this._isInString && this._level === 1) { + this._done = true; + this.push(null); + } + break; + case '\\': + // Escaping escape character. + this._isSkipped = true; + break; + default: + break; + } + } + else { + this._isSkipped = false; + } + curIndex++; + } + if (this._level > 1) { + this._prevBlock = Buffer.concat([ + this._prevBlock, + chunk.slice(objectStart, curIndex), + ]); + } + callback(); + } + _flush(callback) { + callback(); + } + cancel() { + this._done = true; + this.cancelRequested = true; + this.cancelController.abort(); + this.end(); + } +} +exports.StreamArrayParser = StreamArrayParser; +//# sourceMappingURL=streamArrayParser.js.map /***/ }), -/***/ 83375: +/***/ 65880: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(42491), exports); - +exports.StreamDescriptor = void 0; +const streamingApiCaller_1 = __nccwpck_require__(32936); +/** + * A descriptor for streaming calls. + */ +class StreamDescriptor { + constructor(streamType, rest, gaxStreamingRetries) { + this.type = streamType; + this.streaming = true; + this.rest = rest; + this.gaxStreamingRetries = gaxStreamingRetries; + } + getApiCaller() { + // Right now retrying does not work with gRPC-streaming, because retryable + // assumes an API call returns an event emitter while gRPC-streaming methods + // return Stream. + return new streamingApiCaller_1.StreamingApiCaller(this); + } +} +exports.StreamDescriptor = StreamDescriptor; +//# sourceMappingURL=streamDescriptor.js.map /***/ }), -/***/ 56470: -/***/ ((__unused_webpack_module, exports) => { +/***/ 67389: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IMDS_REGION_PATH = exports.DEFAULTS_MODE_OPTIONS = exports.ENV_IMDS_DISABLED = exports.AWS_DEFAULT_REGION_ENV = exports.AWS_REGION_ENV = exports.AWS_EXECUTION_ENV = void 0; -exports.AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; -exports.AWS_REGION_ENV = "AWS_REGION"; -exports.AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; -exports.ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; -exports.DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; -exports.IMDS_REGION_PATH = "/latest/meta-data/placement/region"; - +exports.StreamProxy = exports.StreamType = void 0; +const googleError_1 = __nccwpck_require__(6634); +const streamingRetryRequest_1 = __nccwpck_require__(69645); +const status_1 = __nccwpck_require__(53501); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const duplexify = __nccwpck_require__(76599); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const retryRequest = __nccwpck_require__(63515); +/** + * The type of gRPC streaming. + * @enum {number} + */ +var StreamType; +(function (StreamType) { + /** Client sends a single request, server streams responses. */ + StreamType[StreamType["SERVER_STREAMING"] = 1] = "SERVER_STREAMING"; + /** Client streams requests, server returns a single response. */ + StreamType[StreamType["CLIENT_STREAMING"] = 2] = "CLIENT_STREAMING"; + /** Both client and server stream objects. */ + StreamType[StreamType["BIDI_STREAMING"] = 3] = "BIDI_STREAMING"; +})(StreamType || (exports.StreamType = StreamType = {})); +class StreamProxy extends duplexify { + /** + * StreamProxy is a proxy to gRPC-streaming method. + * + * @private + * @constructor + * @param {StreamType} type - the type of gRPC stream. + * @param {ApiCallback} callback - the callback for further API call. 
+ */ + constructor(type, callback, rest, gaxServerStreamingRetries) { + super(undefined, undefined, { + objectMode: true, + readable: type !== StreamType.CLIENT_STREAMING, + writable: type !== StreamType.SERVER_STREAMING, + }); + this.retries = 0; + this.type = type; + this._callback = callback; + this._isCancelCalled = false; + this._responseHasSent = false; + this.rest = rest; + this.gaxServerStreamingRetries = gaxServerStreamingRetries; + } + shouldRetryRequest(error, retry) { + const e = googleError_1.GoogleError.parseGRPCStatusDetails(error); + let shouldRetry = this.defaultShouldRetry(e, retry); + if (retry.shouldRetryFn) { + shouldRetry = retry.shouldRetryFn(e); + } + return shouldRetry; + } + cancel() { + if (this.stream) { + this.stream.cancel(); + } + else { + this._isCancelCalled = true; + } + } + retry(stream, retry) { + let retryArgument = this.argument; + if (typeof retry.getResumptionRequestFn === 'function') { + const resumptionRetryArgument = retry.getResumptionRequestFn(retryArgument); + if (resumptionRetryArgument !== undefined) { + retryArgument = resumptionRetryArgument; + } + } + this.resetStreams(stream); + const newStream = this.apiCall(retryArgument, this._callback); + this.stream = newStream; + this.streamHandoffHelper(newStream, retry); + return newStream; + } + /** + * Helper function to handle total timeout + max retry check for server streaming retries + * @param {number} deadline - the current retry deadline + * @param {number} maxRetries - maximum total number of retries + * @param {number} totalTimeoutMillis - total timeout in milliseconds + */ + throwIfMaxRetriesOrTotalTimeoutExceeded(deadline, maxRetries, totalTimeoutMillis) { + const now = new Date(); + if (this.prevDeadline !== undefined && + deadline && + now.getTime() >= this.prevDeadline) { + const error = new googleError_1.GoogleError(`Total timeout of API exceeded ${totalTimeoutMillis} milliseconds before any response was received.`); + error.code = status_1.Status.DEADLINE_EXCEEDED; + this.emit('error', error); + this.destroy(); + // Without throwing error you get unhandled error since we are returning a new stream + // There might be a better way to do this + throw error; + } + if (this.retries && this.retries >= maxRetries) { + const error = new googleError_1.GoogleError('Exceeded maximum number of retries before any ' + + 'response was received'); + error.code = status_1.Status.DEADLINE_EXCEEDED; + this.emit('error', error); + this.destroy(); + throw error; + } + } + /** + * Error handler for server streaming retries + * @param {CancellableStream} stream - the stream being retried + * @param {RetryOptions} retry - Configures the exceptions upon which the + * function should retry, and the parameters to the exponential backoff retry + * algorithm. 
+ * @param {Error} error - error to handle + */ + streamHandoffErrorHandler(stream, retry, error) { + let retryStream = this.stream; + const delayMult = retry.backoffSettings.retryDelayMultiplier; + const maxDelay = retry.backoffSettings.maxRetryDelayMillis; + const timeoutMult = retry.backoffSettings.rpcTimeoutMultiplier; + const maxTimeout = retry.backoffSettings.maxRpcTimeoutMillis; + let delay = retry.backoffSettings.initialRetryDelayMillis; + let timeout = retry.backoffSettings.initialRpcTimeoutMillis; + let now = new Date(); + let deadline = 0; + if (retry.backoffSettings.totalTimeoutMillis) { + deadline = now.getTime() + retry.backoffSettings.totalTimeoutMillis; + } + const maxRetries = retry.backoffSettings.maxRetries; + try { + this.throwIfMaxRetriesOrTotalTimeoutExceeded(deadline, maxRetries, retry.backoffSettings.totalTimeoutMillis); + } + catch (error) { + return; + } + this.retries++; + if (this.shouldRetryRequest(error, retry)) { + const toSleep = Math.random() * delay; + setTimeout(() => { + now = new Date(); + delay = Math.min(delay * delayMult, maxDelay); + const timeoutCal = timeout && timeoutMult ? timeout * timeoutMult : 0; + const rpcTimeout = maxTimeout ? maxTimeout : 0; + this.prevDeadline = deadline; + const newDeadline = deadline ? deadline - now.getTime() : 0; + timeout = Math.min(timeoutCal, rpcTimeout, newDeadline); + }, toSleep); + } + else { + const e = googleError_1.GoogleError.parseGRPCStatusDetails(error); + e.note = + 'Exception occurred in retry method that was ' + + 'not classified as transient'; + // for some reason this error must be emitted here + // instead of the destroy, otherwise the error event + // is swallowed + this.emit('error', e); + this.destroy(); + return; + } + retryStream = this.retry(stream, retry); + this.stream = retryStream; + return; + } + /** + * Used during server streaming retries to handle + * event forwarding, errors, and/or stream closure + * @param {CancellableStream} stream - the stream that we're doing the retry on + * @param {RetryOptions} retry - Configures the exceptions upon which the + * function should retry, and the parameters to the exponential backoff retry + * algorithm. + */ + streamHandoffHelper(stream, retry) { + let enteredError = false; + this.eventForwardHelper(stream); + stream.on('error', error => { + enteredError = true; + this.streamHandoffErrorHandler(stream, retry, error); + }); + stream.on('data', (data) => { + this.retries = 0; + this.emit.bind(this, 'data')(data); + }); + stream.on('end', () => { + if (!enteredError) { + enteredError = true; + this.emit('end'); + this.cancel(); + } + }); + } + eventForwardHelper(stream) { + const eventsToForward = ['metadata', 'response', 'status']; + eventsToForward.forEach(event => { + stream.on(event, this.emit.bind(this, event)); + }); + } + statusMetadataHelper(stream) { + // gRPC is guaranteed emit the 'status' event but not 'metadata', and 'status' is the last event to emit. + // Emit the 'response' event if stream has no 'metadata' event. + // This avoids the stream swallowing the other events, such as 'end'. + stream.on('status', () => { + if (!this._responseHasSent) { + stream.emit('response', { + code: 200, + details: '', + message: 'OK', + }); + } + }); + // We also want to supply the status data as 'response' event to support + // the behavior of google-cloud-node expects. 
+ // see: + // https://github.com/GoogleCloudPlatform/google-cloud-node/pull/1775#issuecomment-259141029 + // https://github.com/GoogleCloudPlatform/google-cloud-node/blob/116436fa789d8b0f7fc5100b19b424e3ec63e6bf/packages/common/src/grpc-service.js#L355 + stream.on('metadata', metadata => { + // Create a response object with succeeds. + // TODO: unify this logic with the decoration of gRPC response when it's + // added. see: https://github.com/googleapis/gax-nodejs/issues/65 + stream.emit('response', { + code: 200, + details: '', + message: 'OK', + metadata, + }); + this._responseHasSent = true; + }); + } + /** + * Forward events from an API request stream to the user's stream. + * @param {Stream} stream - The API request stream. + * @param {RetryOptions} retry - Configures the exceptions upon which the + * function should retry, and the parameters to the exponential backoff retry + * algorithm. + */ + forwardEvents(stream) { + this.eventForwardHelper(stream); + this.statusMetadataHelper(stream); + stream.on('error', error => { + googleError_1.GoogleError.parseGRPCStatusDetails(error); + }); + } + defaultShouldRetry(error, retry) { + if ((retry.retryCodes.length > 0 && + retry.retryCodes.indexOf(error.code) < 0) || + retry.retryCodes.length === 0) { + return false; + } + return true; + } + /** + * Forward events from an API request stream to the user's stream. + * @param {Stream} stream - The API request stream. + * @param {RetryOptions} retry - Configures the exceptions upon which the + * function eshould retry, and the parameters to the exponential backoff retry + * algorithm. + */ + forwardEventsWithRetries(stream, retry) { + let retryStream = this.stream; + this.eventForwardHelper(stream); + this.statusMetadataHelper(stream); + stream.on('error', error => { + const timeout = retry.backoffSettings.totalTimeoutMillis; + const maxRetries = retry.backoffSettings.maxRetries; + if ((maxRetries && maxRetries > 0) || (timeout && timeout > 0)) { + if (this.shouldRetryRequest(error, retry)) { + if (maxRetries && timeout) { + const newError = new googleError_1.GoogleError('Cannot set both totalTimeoutMillis and maxRetries ' + + 'in backoffSettings.'); + newError.code = status_1.Status.INVALID_ARGUMENT; + this.emit('error', newError); + this.destroy(); + return; //end chunk + } + else { + this.retries++; + retryStream = this.retry(stream, retry); + this.stream = retryStream; + return retryStream; + } + } + else { + const e = googleError_1.GoogleError.parseGRPCStatusDetails(error); + e.note = + 'Exception occurred in retry method that was ' + + 'not classified as transient'; + this.destroy(e); + return; // end chunk + } + } + else { + if (maxRetries === 0) { + const e = googleError_1.GoogleError.parseGRPCStatusDetails(error); + e.note = 'Max retries is set to zero.'; + this.destroy(e); + return; // end chunk + } + return googleError_1.GoogleError.parseGRPCStatusDetails(error); + } + }); + return retryStream; + } + /** + * Resets the target stream as part of the retry process + * @param {CancellableStream} requestStream - the stream to end + */ + resetStreams(requestStream) { + if (requestStream) { + requestStream.cancel && requestStream.cancel(); + if (requestStream.destroy) { + requestStream.destroy(); + } + else if (requestStream.end) { + // TODO: not used in server streaming, but likely needed + // if we want to add BIDI or client side streaming + requestStream.end(); + } + } + } + /** + * Specifies the target stream. + * @param {ApiCall} apiCall - the API function to be called. 
+ * @param {Object} argument - the argument to be passed to the apiCall. + * @param {RetryOptions} retry - Configures the exceptions upon which the + * function should retry, and the parameters to the exponential backoff retry + * algorithm. + */ + setStream(apiCall, argument, retryRequestOptions = {}, retry) { + this.apiCall = apiCall; + this.argument = argument; + if (this.type === StreamType.SERVER_STREAMING) { + if (this.rest) { + const stream = apiCall(argument, this._callback); + this.stream = stream; + this.setReadable(stream); + } + else if (this.gaxServerStreamingRetries) { + const retryStream = (0, streamingRetryRequest_1.streamingRetryRequest)({ + request: () => { + if (this._isCancelCalled) { + if (this.stream) { + this.stream.cancel(); + } + return; + } + const stream = apiCall(argument, this._callback); + this.stream = stream; + this.stream = this.forwardEventsWithRetries(stream, retry); + return this.stream; + }, + }); + this.setReadable(retryStream); + } + else { + const retryStream = retryRequest(null, { + objectMode: true, + request: () => { + if (this._isCancelCalled) { + if (this.stream) { + this.stream.cancel(); + } + return; + } + const stream = apiCall(argument, this._callback); + this.stream = stream; + this.forwardEvents(stream); + return stream; + }, + retries: retryRequestOptions.retries, + currentRetryAttempt: retryRequestOptions.currentRetryAttempt, + noResponseRetries: retryRequestOptions.noResponseRetries, + shouldRetryFn: retryRequestOptions.shouldRetryFn, + }); + this.setReadable(retryStream); + } + return; + } + const stream = apiCall(argument, this._callback); + this.stream = stream; + this.forwardEvents(stream); + if (this.type === StreamType.CLIENT_STREAMING) { + this.setWritable(stream); + } + if (this.type === StreamType.BIDI_STREAMING) { + this.setReadable(stream); + this.setWritable(stream); + } + if (this._isCancelCalled && this.stream) { + this.stream.cancel(); + } + } +} +exports.StreamProxy = StreamProxy; +//# sourceMappingURL=streaming.js.map /***/ }), -/***/ 15577: -/***/ ((__unused_webpack_module, exports) => { +/***/ 32936: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NODE_DEFAULTS_MODE_CONFIG_OPTIONS = void 0; -const AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; -const AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; -exports.NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { - environmentVariableSelector: (env) => { - return env[AWS_DEFAULTS_MODE_ENV]; - }, - configFileSelector: (profile) => { - return profile[AWS_DEFAULTS_MODE_CONFIG]; - }, - default: "legacy", -}; - +exports.StreamingApiCaller = void 0; +const warnings_1 = __nccwpck_require__(16328); +const streaming_1 = __nccwpck_require__(67389); +class StreamingApiCaller { + /** + * An API caller for methods of gRPC streaming. 
+ * @private + * @constructor + * @param {StreamDescriptor} descriptor - the descriptor of the method structure. + */ + constructor(descriptor) { + this.descriptor = descriptor; + } + init(callback) { + return new streaming_1.StreamProxy(this.descriptor.type, callback, this.descriptor.rest, this.descriptor.gaxStreamingRetries); + } + wrap(func) { + switch (this.descriptor.type) { + case streaming_1.StreamType.SERVER_STREAMING: + return (argument, metadata, options) => { + return func(argument, metadata, options); + }; + case streaming_1.StreamType.CLIENT_STREAMING: + return (argument, metadata, options, callback) => { + return func(metadata, options, callback); + }; + case streaming_1.StreamType.BIDI_STREAMING: + return (argument, metadata, options) => { + return func(metadata, options); + }; + default: + (0, warnings_1.warn)('streaming_wrap_unknown_stream_type', `Unknown stream type: ${this.descriptor.type}`); + } + return func; + } + call(apiCall, argument, settings, stream) { + stream.setStream(apiCall, argument, settings.retryRequestOptions, settings.retry); + } + fail(stream, err) { + stream.emit('error', err); + } + result(stream) { + return stream; + } +} +exports.StreamingApiCaller = StreamingApiCaller; +//# sourceMappingURL=streamingApiCaller.js.map /***/ }), -/***/ 72429: +/***/ 69645: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright 2023 Google LLC Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(46217), exports); - +exports.streamingRetryRequest = streamingRetryRequest; +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// https://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const { PassThrough } = __nccwpck_require__(12781); +const DEFAULTS = { + /* + Max # of retries + */ + maxRetries: 2, +}; +// In retry-request, you could pass parameters to request using the requestOpts parameter +// when we called retry-request from gax, we always passed null +// passing null here removes an unnecessary parameter from this implementation +const requestOps = null; +const objectMode = true; // we don't support objectMode being false +/** + * Localized adaptation derived from retry-request + * @param opts - corresponds to https://github.com/googleapis/retry-request#opts-optional + * @returns + */ +function streamingRetryRequest(opts) { + opts = Object.assign({}, DEFAULTS, opts); + if (opts.request === undefined) { + throw new Error('A request function must be provided'); + } + let numNoResponseAttempts = 0; + let streamResponseHandled = false; + let requestStream; + let delayStream; + const retryStream = new PassThrough({ objectMode: objectMode }); + makeRequest(); + return retryStream; + function makeRequest() { + streamResponseHandled = false; + delayStream = new PassThrough({ objectMode: objectMode }); + requestStream = opts.request(requestOps); + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. 
That's what + // is up with the `streamResponseHandled` tracking. + .on('error', (err) => { + if (streamResponseHandled) { + return; + } + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp) => { + if (streamResponseHandled) { + return; + } + streamResponseHandled = true; + onResponse(null, resp); + }); + requestStream.pipe(delayStream); + } + function onResponse(err, response = null) { + // An error such as DNS resolution. + if (err) { + numNoResponseAttempts++; + if (numNoResponseAttempts <= opts.maxRetries) { + makeRequest(); + } + else { + retryStream.emit('error', err); + } + return; + } + // No more attempts need to be made, just continue on. + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', () => { + // retryStream must be destroyed here for the stream handoff part of retries to function properly + // but the error event should not be passed - if it emits as part of .destroy() + // it will bubble up early to the caller + retryStream.destroy(); + }); + } +} +//# sourceMappingURL=streamingRetryRequest.js.map /***/ }), -/***/ 46217: +/***/ 86707: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveDefaultsModeConfig = void 0; -const config_resolver_1 = __nccwpck_require__(53098); -const credential_provider_imds_1 = __nccwpck_require__(7477); -const node_config_provider_1 = __nccwpck_require__(33461); -const property_provider_1 = __nccwpck_require__(79721); -const constants_1 = __nccwpck_require__(56470); -const defaultsModeConfig_1 = __nccwpck_require__(15577); -const resolveDefaultsModeConfig = ({ region = (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS), defaultsMode = (0, node_config_provider_1.loadConfig)(defaultsModeConfig_1.NODE_DEFAULTS_MODE_CONFIG_OPTIONS), } = {}) => (0, property_provider_1.memoize)(async () => { - const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; - switch (mode === null || mode === void 0 ? void 0 : mode.toLowerCase()) { - case "auto": - return resolveNodeDefaultsModeAuto(region); - case "in-region": - case "cross-region": - case "mobile": - case "standard": - case "legacy": - return Promise.resolve(mode === null || mode === void 0 ? 
void 0 : mode.toLocaleLowerCase()); - case undefined: - return Promise.resolve("legacy"); - default: - throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); +exports.getField = getField; +exports.deepCopyWithoutMatchedFields = deepCopyWithoutMatchedFields; +exports.deleteField = deleteField; +exports.buildQueryStringComponents = buildQueryStringComponents; +exports.encodeWithSlashes = encodeWithSlashes; +exports.encodeWithoutSlashes = encodeWithoutSlashes; +exports.applyPattern = applyPattern; +exports.match = match; +exports.flattenObject = flattenObject; +exports.isProto3OptionalField = isProto3OptionalField; +exports.transcode = transcode; +exports.overrideHttpRules = overrideHttpRules; +const util_1 = __nccwpck_require__(26969); +const httpOptionName = '(google.api.http)'; +const proto3OptionalName = 'proto3_optional'; +// List of methods as defined in google/api/http.proto (see HttpRule) +const supportedHttpMethods = ['get', 'post', 'put', 'patch', 'delete']; +function getField(request, field, allowObjects = false // in most cases, we need leaf fields +) { + const parts = field.split('.'); + let value = request; + for (const part of parts) { + if (typeof value !== 'object') { + return undefined; + } + value = value[part]; } -}); -exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; -const resolveNodeDefaultsModeAuto = async (clientRegion) => { - if (clientRegion) { - const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; - const inferredRegion = await inferPhysicalRegion(); - if (!inferredRegion) { - return "standard"; + if (!allowObjects && + typeof value === 'object' && + !Array.isArray(value) && + value !== null) { + return undefined; + } + return value; +} +function deepCopyWithoutMatchedFields(request, fieldsToSkip, fullNamePrefix = '') { + if (typeof request !== 'object' || request === null) { + return request; + } + const copy = Object.assign({}, request); + for (const key in copy) { + if (fieldsToSkip.has(`${fullNamePrefix}${key}`)) { + delete copy[key]; + continue; } - if (resolvedRegion === inferredRegion) { - return "in-region"; + const nextFullNamePrefix = `${fullNamePrefix}${key}.`; + if (Array.isArray(copy[key])) { + // a field of an array cannot be addressed as "request.field", so we omit the skipping logic for array descendants + copy[key] = copy[key].map(value => deepCopyWithoutMatchedFields(value, new Set())); + } + else if (typeof copy[key] === 'object' && copy[key] !== null) { + copy[key] = deepCopyWithoutMatchedFields(copy[key], fieldsToSkip, nextFullNamePrefix); + } + } + return copy; +} +function deleteField(request, field) { + const parts = field.split('.'); + while (parts.length > 1) { + if (typeof request !== 'object') { + return; + } + const part = parts.shift(); + request = request[part]; + } + const part = parts.shift(); + if (typeof request !== 'object') { + return; + } + delete request[part]; +} +function buildQueryStringComponents(request, prefix = '') { + const resultList = []; + for (const key in request) { + if (Array.isArray(request[key])) { + for (const value of request[key]) { + resultList.push(`${prefix}${encodeWithoutSlashes(key)}=${encodeWithoutSlashes(value.toString())}`); + } + } + else if (typeof request[key] === 'object' && request[key] !== null) { + resultList.push(...buildQueryStringComponents(request[key], `${key}.`)); } else { - return "cross-region"; + 
resultList.push(`${prefix}${encodeWithoutSlashes(key)}=${encodeWithoutSlashes(request[key] === null ? 'null' : request[key].toString())}`); } } - return "standard"; -}; -const inferPhysicalRegion = async () => { - var _a; - if (process.env[constants_1.AWS_EXECUTION_ENV] && (process.env[constants_1.AWS_REGION_ENV] || process.env[constants_1.AWS_DEFAULT_REGION_ENV])) { - return (_a = process.env[constants_1.AWS_REGION_ENV]) !== null && _a !== void 0 ? _a : process.env[constants_1.AWS_DEFAULT_REGION_ENV]; + return resultList; +} +function encodeWithSlashes(str) { + return str + .split('') + .map(c => (c.match(/[-_.~0-9a-zA-Z]/) ? c : encodeURIComponent(c))) + .join(''); +} +function encodeWithoutSlashes(str) { + return str + .split('') + .map(c => (c.match(/[-_.~0-9a-zA-Z/]/) ? c : encodeURIComponent(c))) + .join(''); +} +function escapeRegExp(str) { + return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} +function applyPattern(pattern, fieldValue) { + if (!pattern || pattern === '*') { + return encodeWithSlashes(fieldValue); } - if (!process.env[constants_1.ENV_IMDS_DISABLED]) { - try { - const endpoint = await (0, credential_provider_imds_1.getInstanceMetadataEndpoint)(); - return (await (0, credential_provider_imds_1.httpRequest)({ ...endpoint, path: constants_1.IMDS_REGION_PATH })).toString(); + if (!pattern.includes('*') && pattern !== fieldValue) { + return undefined; + } + // since we're converting the pattern to a regex, make necessary precautions: + const regex = new RegExp('^' + + escapeRegExp(pattern) + .replace(/\\\*\\\*/g, '(.+)') + .replace(/\\\*/g, '([^/]+)') + + '$'); + if (!fieldValue.match(regex)) { + return undefined; + } + return encodeWithoutSlashes(fieldValue); +} +function fieldToCamelCase(field) { + const parts = field.split('.'); + return parts.map(part => (0, util_1.toCamelCase)(part)).join('.'); +} +function match(request, pattern) { + let url = pattern; + const matchedFields = []; + for (;;) { + const match = url.match(/^(.*)\{([^}=]+)(?:=([^}]*))?\}(.*)/); + if (!match) { + break; } - catch (e) { + const [, before, field, pattern, after] = match; + const camelCasedField = fieldToCamelCase(field); + matchedFields.push(fieldToCamelCase(camelCasedField)); + const fieldValue = getField(request, camelCasedField); + if (fieldValue === undefined) { + return undefined; + } + const appliedPattern = applyPattern(pattern, fieldValue === null ? 'null' : fieldValue.toString()); + if (appliedPattern === undefined) { + return undefined; } + url = before + appliedPattern + after; } -}; - + return { matchedFields, url }; +} +function flattenObject(request) { + const result = {}; + for (const key in request) { + if (request[key] === undefined) { + continue; + } + if (Array.isArray(request[key])) { + // According to the http.proto comments, a repeated field may only + // contain primitive types, so no extra recursion here. 
+ result[key] = request[key]; + continue; + } + if (typeof request[key] === 'object' && request[key] !== null) { + const nested = flattenObject(request[key]); + for (const nestedKey in nested) { + result[`${key}.${nestedKey}`] = nested[nestedKey]; + } + continue; + } + result[key] = request[key]; + } + return result; +} +function isProto3OptionalField(field) { + return field && field.options && field.options[proto3OptionalName]; +} +function transcode(request, parsedOptions) { + const httpRules = []; + for (const option of parsedOptions) { + if (!(httpOptionName in option)) { + continue; + } + const httpRule = option[httpOptionName]; + httpRules.push(httpRule); + if (httpRule === null || httpRule === void 0 ? void 0 : httpRule.additional_bindings) { + const additionalBindings = Array.isArray(httpRule.additional_bindings) + ? httpRule.additional_bindings + : [httpRule.additional_bindings]; + httpRules.push(...additionalBindings); + } + } + for (const httpRule of httpRules) { + for (const httpMethod of supportedHttpMethods) { + if (!(httpMethod in httpRule)) { + continue; + } + const pathTemplate = httpRule[httpMethod]; + const matchResult = match(request, pathTemplate); + if (matchResult === undefined) { + continue; + } + const { url, matchedFields } = matchResult; + let data = deepCopyWithoutMatchedFields(request, new Set(matchedFields)); + if (httpRule.body === '*') { + return { httpMethod, url, queryString: '', data }; + } + // one field possibly goes to request data, others go to query string + const queryStringObject = data; + if (httpRule.body) { + data = getField(queryStringObject, fieldToCamelCase(httpRule.body), + /*allowObjects:*/ true); + deleteField(queryStringObject, fieldToCamelCase(httpRule.body)); + } + else { + data = ''; + } + const queryStringComponents = buildQueryStringComponents(queryStringObject); + const queryString = queryStringComponents.join('&'); + if (!data || + (typeof data === 'object' && Object.keys(data).length === 0)) { + data = ''; + } + return { httpMethod, url, queryString, data }; + } + } + return undefined; +} +// Override the protobuf json's the http rules. +function overrideHttpRules(httpRules, protoJson) { + for (const rule of httpRules) { + if (!rule.selector) { + continue; + } + const rpc = protoJson.lookup(rule.selector); + // Not support override on non-exist RPC or a RPC without an annotation. + // We could reconsider if we have the use case later. + if (!rpc || !rpc.parsedOptions) { + continue; + } + for (const item of rpc.parsedOptions) { + if (!(httpOptionName in item)) { + continue; + } + const httpOptions = item[httpOptionName]; + for (const httpMethod in httpOptions) { + if (httpMethod in rule) { + if (httpMethod === 'additional_bindings') { + continue; + } + httpOptions[httpMethod] = + rule[httpMethod]; + } + if (rule.additional_bindings) { + httpOptions['additional_bindings'] = !httpOptions['additional_bindings'] + ? [] + : Array.isArray(httpOptions['additional_bindings']) + ? httpOptions['additional_bindings'] + : [httpOptions['additional_bindings']]; + // Make the additional_binding to be an array if it is not. 
+ httpOptions['additional_bindings'].push(...rule.additional_bindings); + } + } + } + } +} +//# sourceMappingURL=transcoding.js.map /***/ }), -/***/ 71280: -/***/ ((__unused_webpack_module, exports) => { +/***/ 26969: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.debugId = void 0; -exports.debugId = "endpoints"; - +exports.camelToSnakeCase = camelToSnakeCase; +exports.toCamelCase = toCamelCase; +exports.toLowerCamelCase = toLowerCamelCase; +exports.makeUUID = makeUUID; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const uuid_1 = __nccwpck_require__(75840); +function words(str, normalize = false) { + if (normalize) { + // strings like somethingABCSomething are special case for protobuf.js, + // they should be split as "something", "abc", "something". + // Deal with sequences of capital letters first. + str = str.replace(/([A-Z])([A-Z]+)([A-Z])/g, (str) => { + return (str[0] + + str.slice(1, str.length - 1).toLowerCase() + + str[str.length - 1]); + }); + } + // split on spaces, non-alphanumeric, or capital letters + // note: we keep the capitalization of the first word (special case: IPProtocol) + return str + .split(/(?=[A-Z])|[^A-Za-z0-9.]+/) + .filter(w => w.length > 0) + .map((w, index) => (index === 0 ? w : w.toLowerCase())); +} +/** + * Converts the first character of the given string to lower case. + */ +function lowercase(str) { + if (str.length === 0) { + return str; + } + return str[0].toLowerCase() + str.slice(1); +} +/** + * Converts a given string from camelCase (used by protobuf.js and in JSON) + * to snake_case (normally used in proto definitions). + */ +function camelToSnakeCase(str) { + // Keep the first position capitalization, otherwise decapitalize with underscore. + const wordsList = words(str); + if (wordsList.length === 0) { + return str; + } + const result = [wordsList[0]]; + result.push(...wordsList.slice(1).map(lowercase)); + return result.join('_'); +} +/** + * Capitalizes the first character of the given string. + */ +function capitalize(str) { + if (str.length === 0) { + return str; + } + return str[0].toUpperCase() + str.slice(1); +} +/** + * Converts a given string from snake_case (normally used in proto definitions) or + * PascalCase (also used in proto definitions) to camelCase (used by protobuf.js). + * Preserves capitalization of the first character. + */ +function toCamelCase(str) { + const wordsList = words(str, /*normalize:*/ true); + if (wordsList.length === 0) { + return str; + } + const result = [wordsList[0]]; + result.push(...wordsList.slice(1).map(w => { + if (w.match(/^\d+$/)) { + return '_' + w; + } + return capitalize(w); + })); + return result.join(''); +} +/** + * Converts a given string to lower camel case (forcing the first character to be + * in lower case). 
+ */ +function toLowerCamelCase(str) { + const camelCase = toCamelCase(str); + if (camelCase.length === 0) { + return camelCase; + } + return camelCase[0].toLowerCase() + camelCase.slice(1); +} +/** + * Converts a given string to lower camel case (forcing the first character to be + * in lower case). + */ +function makeUUID() { + return (0, uuid_1.v4)(); +} +//# sourceMappingURL=util.js.map /***/ }), -/***/ 30540: +/***/ 16328: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(71280), exports); -tslib_1.__exportStar(__nccwpck_require__(48927), exports); - +exports.warn = warn; +const featureDetection_1 = __nccwpck_require__(11154); +const emittedWarnings = new Set(); +// warnType is the type of warning (e.g. 'DeprecationWarning', 'ExperimentalWarning', etc.) +function warn(code, message, warnType) { + // Only show a given warning once + if (emittedWarnings.has(code)) { + return; + } + emittedWarnings.add(code); + if (!(0, featureDetection_1.isNodeJS)()) { + console.warn(message); + } + else if (typeof warnType !== 'undefined') { + process.emitWarning(message, { + type: warnType, + }); + } + else { + process.emitWarning(message); + } +} +//# sourceMappingURL=warnings.js.map /***/ }), -/***/ 48927: -/***/ ((__unused_webpack_module, exports) => { +/***/ 76031: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toDebugString = void 0; -function toDebugString(input) { - if (typeof input !== "object" || input == null) { - return input; +exports.GoogleToken = void 0; +const fs = __nccwpck_require__(57147); +const gaxios_1 = __nccwpck_require__(59555); +const jws = __nccwpck_require__(4636); +const path = __nccwpck_require__(71017); +const util_1 = __nccwpck_require__(73837); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. + throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; } - if ("ref" in input) { - return `$${toDebugString(input.ref)}`; +} +class GoogleToken { + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; } - if ("fn" in input) { - return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; + } + /** + * Create a GoogleToken. + * + * @param options Configuration object. + */ + constructor(options) { + _GoogleToken_instances.add(this); + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + _GoogleToken_inFlightRequest.set(this, void 0); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); + } + /** + * Returns whether the token has expired. + * + * @return true if the token has expired, false otherwise. + */ + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; + } + else { + return true; + } + } + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? 
_a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; + } + else { + return true; + } + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; + } + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); + } + /** + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties + */ + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; + } + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. ' + + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + } + revokeToken(callback) { + if (callback) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); } - return JSON.stringify(input, null, 2); } -exports.toDebugString = toDebugString; - +exports.GoogleToken = GoogleToken; +_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { + if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { + return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); + } + try { + return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); + } + finally { + __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); + } +}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); + } + } + return 
__classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); +}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } +}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ + url, + retry: true, + }); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); +}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } +}, _GoogleToken_requestToken = +/** + * Request the token from Google. + */ +async function _GoogleToken_requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? `: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; + } + throw e; + } +}; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 45473: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 31621: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(55402), exports); -tslib_1.__exportStar(__nccwpck_require__(55021), exports); -tslib_1.__exportStar(__nccwpck_require__(38824), exports); -tslib_1.__exportStar(__nccwpck_require__(78693), exports); -tslib_1.__exportStar(__nccwpck_require__(75442), exports); + +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? 
'-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; /***/ }), -/***/ 29132: -/***/ ((__unused_webpack_module, exports) => { +/***/ 52589: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.booleanEquals = void 0; -const booleanEquals = (value1, value2) => value1 === value2; -exports.booleanEquals = booleanEquals; - +var __assign=this&&this.__assign||function(){__assign=Object.assign||function(t){for(var s,i=1,n=arguments.length;i'"&]/g,nonAscii:/[<>'"&\u0080-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g,nonAsciiPrintable:/[<>'"&\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g,nonAsciiPrintableOnly:/[\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g,extensive:/[\x01-\x0c\x0e-\x1f\x21-\x2c\x2e-\x2f\x3a-\x40\x5b-\x60\x7b-\x7d\x7f-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g};var defaultEncodeOptions={mode:"specialChars",level:"all",numeric:"decimal"};function encode(text,_a){var _b=_a===void 0?defaultEncodeOptions:_a,_c=_b.mode,mode=_c===void 0?"specialChars":_c,_d=_b.numeric,numeric=_d===void 0?"decimal":_d,_e=_b.level,level=_e===void 0?"all":_e;if(!text){return""}var encodeRegExp=encodeRegExps[mode];var references=allNamedReferences[level].characters;var isHex=numeric==="hexadecimal";return replaceUsingRegExp(text,encodeRegExp,(function(input){var result=references[input];if(!result){var code=input.length>1?surrogate_pairs_1.getCodePoint(input,0):input.charCodeAt(0);result=(isHex?"&#x"+code.toString(16):"&#"+code)+";"}return result}))}exports.encode=encode;var defaultDecodeOptions={scope:"body",level:"all"};var strict=/&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);/g;var attribute=/&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+)[;=]?/g;var baseDecodeRegExps={xml:{strict:strict,attribute:attribute,body:named_references_1.bodyRegExps.xml},html4:{strict:strict,attribute:attribute,body:named_references_1.bodyRegExps.html4},html5:{strict:strict,attribute:attribute,body:named_references_1.bodyRegExps.html5}};var decodeRegExps=__assign(__assign({},baseDecodeRegExps),{all:baseDecodeRegExps.html5});var fromCharCode=String.fromCharCode;var outOfBoundsChar=fromCharCode(65533);var defaultDecodeEntityOptions={level:"all"};function getDecodedEntity(entity,references,isAttribute,isStrict){var decodeResult=entity;var decodeEntityLastChar=entity[entity.length-1];if(isAttribute&&decodeEntityLastChar==="="){decodeResult=entity}else if(isStrict&&decodeEntityLastChar!==";"){decodeResult=entity}else{var decodeResultByReference=references[entity];if(decodeResultByReference){decodeResult=decodeResultByReference}else if(entity[0]==="&"&&entity[1]==="#"){var decodeSecondChar=entity[2];var decodeCode=decodeSecondChar=="x"||decodeSecondChar=="X"?parseInt(entity.substr(3),16):parseInt(entity.substr(2));decodeResult=decodeCode>=1114111?outOfBoundsChar:decodeCode>65535?surrogate_pairs_1.fromCodePoint(decodeCode):fromCharCode(numeric_unicode_map_1.numericUnicodeMap[decodeCode]||decodeCode)}}return 
decodeResult}function decodeEntity(entity,_a){var _b=(_a===void 0?defaultDecodeEntityOptions:_a).level,level=_b===void 0?"all":_b;if(!entity){return""}return getDecodedEntity(entity,allNamedReferences[level].entities,false,false)}exports.decodeEntity=decodeEntity;function decode(text,_a){var _b=_a===void 0?defaultDecodeOptions:_a,_c=_b.level,level=_c===void 0?"all":_c,_d=_b.scope,scope=_d===void 0?level==="xml"?"strict":"body":_d;if(!text){return""}var decodeRegExp=decodeRegExps[level][scope];var references=allNamedReferences[level].entities;var isAttribute=scope==="attribute";var isStrict=scope==="strict";return replaceUsingRegExp(text,decodeRegExp,(function(entity){return getDecodedEntity(entity,references,isAttribute,isStrict)}))}exports.decode=decode; +//# sourceMappingURL=./index.js.map /***/ }), -/***/ 84624: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6068: +/***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getAttr = void 0; -const types_1 = __nccwpck_require__(75442); -const getAttrPathList_1 = __nccwpck_require__(91311); -const getAttr = (value, path) => (0, getAttrPathList_1.getAttrPathList)(path).reduce((acc, index) => { - if (typeof acc !== "object") { - throw new types_1.EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); - } - else if (Array.isArray(acc)) { - return acc[parseInt(index)]; - } - return acc[index]; -}, value); -exports.getAttr = getAttr; - +Object.defineProperty(exports, "__esModule", ({value:true}));exports.bodyRegExps={xml:/&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g,html4:/∉|&(?:nbsp|iexcl|cent|pound|curren|yen|brvbar|sect|uml|copy|ordf|laquo|not|shy|reg|macr|deg|plusmn|sup2|sup3|acute|micro|para|middot|cedil|sup1|ordm|raquo|frac14|frac12|frac34|iquest|Agrave|Aacute|Acirc|Atilde|Auml|Aring|AElig|Ccedil|Egrave|Eacute|Ecirc|Euml|Igrave|Iacute|Icirc|Iuml|ETH|Ntilde|Ograve|Oacute|Ocirc|Otilde|Ouml|times|Oslash|Ugrave|Uacute|Ucirc|Uuml|Yacute|THORN|szlig|agrave|aacute|acirc|atilde|auml|aring|aelig|ccedil|egrave|eacute|ecirc|euml|igrave|iacute|icirc|iuml|eth|ntilde|ograve|oacute|ocirc|otilde|ouml|divide|oslash|ugrave|uacute|ucirc|uuml|yacute|thorn|yuml|quot|amp|lt|gt|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g,html5:/·|℗|⋇|⪧|⩺|⋗|⦕|⩼|⪆|⥸|⋗|⋛|⪌|≷|≳|⪦|⩹|⋖|⋋|⋉|⥶|⩻|⦖|◃|⊴|◂|∉|⋹̸|⋵̸|∉|⋷|⋶|∌|∌|⋾|⋽|∥|⊠|⨱|⨰|&(?:AElig|AMP|Aacute|Acirc|Agrave|Aring|Atilde|Auml|COPY|Ccedil|ETH|Eacute|Ecirc|Egrave|Euml|GT|Iacute|Icirc|Igrave|Iuml|LT|Ntilde|Oacute|Ocirc|Ograve|Oslash|Otilde|Ouml|QUOT|REG|THORN|Uacute|Ucirc|Ugrave|Uuml|Yacute|aacute|acirc|acute|aelig|agrave|amp|aring|atilde|auml|brvbar|ccedil|cedil|cent|copy|curren|deg|divide|eacute|ecirc|egrave|eth|euml|frac12|frac14|frac34|gt|iacute|icirc|iexcl|igrave|iquest|iuml|laquo|lt|macr|micro|middot|nbsp|not|ntilde|oacute|ocirc|ograve|ordf|ordm|oslash|otilde|ouml|para|plusmn|pound|quot|raquo|reg|sect|shy|sup1|sup2|sup3|szlig|thorn|times|uacute|ucirc|ugrave|uml|uuml|yacute|yen|yuml|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g};exports.namedReferences={xml:{entities:{"<":"<",">":">",""":'"',"'":"'","&":"&"},characters:{"<":"<",">":">",'"':""","'":"'","&":"&"}},html4:{entities:{"'":"'"," ":" "," ":" 
","¡":"¡","¡":"¡","¢":"¢","¢":"¢","£":"£","£":"£","¤":"¤","¤":"¤","¥":"¥","¥":"¥","¦":"¦","¦":"¦","§":"§","§":"§","¨":"¨","¨":"¨","©":"©","©":"©","ª":"ª","ª":"ª","«":"«","«":"«","¬":"¬","¬":"¬","­":"­","­":"­","®":"®","®":"®","¯":"¯","¯":"¯","°":"°","°":"°","±":"±","±":"±","²":"²","²":"²","³":"³","³":"³","´":"´","´":"´","µ":"µ","µ":"µ","¶":"¶","¶":"¶","·":"·","·":"·","¸":"¸","¸":"¸","¹":"¹","¹":"¹","º":"º","º":"º","»":"»","»":"»","¼":"¼","¼":"¼","½":"½","½":"½","¾":"¾","¾":"¾","¿":"¿","¿":"¿","À":"À","À":"À","Á":"Á","Á":"Á","Â":"Â","Â":"Â","Ã":"Ã","Ã":"Ã","Ä":"Ä","Ä":"Ä","Å":"Å","Å":"Å","Æ":"Æ","Æ":"Æ","Ç":"Ç","Ç":"Ç","È":"È","È":"È","É":"É","É":"É","Ê":"Ê","Ê":"Ê","Ë":"Ë","Ë":"Ë","Ì":"Ì","Ì":"Ì","Í":"Í","Í":"Í","Î":"Î","Î":"Î","Ï":"Ï","Ï":"Ï","Ð":"Ð","Ð":"Ð","Ñ":"Ñ","Ñ":"Ñ","Ò":"Ò","Ò":"Ò","Ó":"Ó","Ó":"Ó","Ô":"Ô","Ô":"Ô","Õ":"Õ","Õ":"Õ","Ö":"Ö","Ö":"Ö","×":"×","×":"×","Ø":"Ø","Ø":"Ø","Ù":"Ù","Ù":"Ù","Ú":"Ú","Ú":"Ú","Û":"Û","Û":"Û","Ü":"Ü","Ü":"Ü","Ý":"Ý","Ý":"Ý","Þ":"Þ","Þ":"Þ","ß":"ß","ß":"ß","à":"à","à":"à","á":"á","á":"á","â":"â","â":"â","ã":"ã","ã":"ã","ä":"ä","ä":"ä","å":"å","å":"å","æ":"æ","æ":"æ","ç":"ç","ç":"ç","è":"è","è":"è","é":"é","é":"é","ê":"ê","ê":"ê","ë":"ë","ë":"ë","ì":"ì","ì":"ì","í":"í","í":"í","î":"î","î":"î","ï":"ï","ï":"ï","ð":"ð","ð":"ð","ñ":"ñ","ñ":"ñ","ò":"ò","ò":"ò","ó":"ó","ó":"ó","ô":"ô","ô":"ô","õ":"õ","õ":"õ","ö":"ö","ö":"ö","÷":"÷","÷":"÷","ø":"ø","ø":"ø","ù":"ù","ù":"ù","ú":"ú","ú":"ú","û":"û","û":"û","ü":"ü","ü":"ü","ý":"ý","ý":"ý","þ":"þ","þ":"þ","ÿ":"ÿ","ÿ":"ÿ",""":'"',""":'"',"&":"&","&":"&","<":"<","<":"<",">":">",">":">","Œ":"Œ","œ":"œ","Š":"Š","š":"š","Ÿ":"Ÿ","ˆ":"ˆ","˜":"˜"," ":" "," ":" "," ":" ","‌":"‌","‍":"‍","‎":"‎","‏":"‏","–":"–","—":"—","‘":"‘","’":"’","‚":"‚","“":"“","”":"”","„":"„","†":"†","‡":"‡","‰":"‰","‹":"‹","›":"›","€":"€","ƒ":"ƒ","Α":"Α","Β":"Β","Γ":"Γ","Δ":"Δ","Ε":"Ε","Ζ":"Ζ","Η":"Η","Θ":"Θ","Ι":"Ι","Κ":"Κ","Λ":"Λ","Μ":"Μ","Ν":"Ν","Ξ":"Ξ","Ο":"Ο","Π":"Π","Ρ":"Ρ","Σ":"Σ","Τ":"Τ","Υ":"Υ","Φ":"Φ","Χ":"Χ","Ψ":"Ψ","Ω":"Ω","α":"α","β":"β","γ":"γ","δ":"δ","ε":"ε","ζ":"ζ","η":"η","θ":"θ","ι":"ι","κ":"κ","λ":"λ","μ":"μ","ν":"ν","ξ":"ξ","ο":"ο","π":"π","ρ":"ρ","ς":"ς","σ":"σ","τ":"τ","υ":"υ","φ":"φ","χ":"χ","ψ":"ψ","ω":"ω","ϑ":"ϑ","ϒ":"ϒ","ϖ":"ϖ","•":"•","…":"…","′":"′","″":"″","‾":"‾","⁄":"⁄","℘":"℘","ℑ":"ℑ","ℜ":"ℜ","™":"™","ℵ":"ℵ","←":"←","↑":"↑","→":"→","↓":"↓","↔":"↔","↵":"↵","⇐":"⇐","⇑":"⇑","⇒":"⇒","⇓":"⇓","⇔":"⇔","∀":"∀","∂":"∂","∃":"∃","∅":"∅","∇":"∇","∈":"∈","∉":"∉","∋":"∋","∏":"∏","∑":"∑","−":"−","∗":"∗","√":"√","∝":"∝","∞":"∞","∠":"∠","∧":"∧","∨":"∨","∩":"∩","∪":"∪","∫":"∫","∴":"∴","∼":"∼","≅":"≅","≈":"≈","≠":"≠","≡":"≡","≤":"≤","≥":"≥","⊂":"⊂","⊃":"⊃","⊄":"⊄","⊆":"⊆","⊇":"⊇","⊕":"⊕","⊗":"⊗","⊥":"⊥","⋅":"⋅","⌈":"⌈","⌉":"⌉","⌊":"⌊","⌋":"⌋","⟨":"〈","⟩":"〉","◊":"◊","♠":"♠","♣":"♣","♥":"♥","♦":"♦"},characters:{"'":"'"," ":" 
","¡":"¡","¢":"¢","£":"£","¤":"¤","¥":"¥","¦":"¦","§":"§","¨":"¨","©":"©","ª":"ª","«":"«","¬":"¬","­":"­","®":"®","¯":"¯","°":"°","±":"±","²":"²","³":"³","´":"´","µ":"µ","¶":"¶","·":"·","¸":"¸","¹":"¹","º":"º","»":"»","¼":"¼","½":"½","¾":"¾","¿":"¿","À":"À","Á":"Á","Â":"Â","Ã":"Ã","Ä":"Ä","Å":"Å","Æ":"Æ","Ç":"Ç","È":"È","É":"É","Ê":"Ê","Ë":"Ë","Ì":"Ì","Í":"Í","Î":"Î","Ï":"Ï","Ð":"Ð","Ñ":"Ñ","Ò":"Ò","Ó":"Ó","Ô":"Ô","Õ":"Õ","Ö":"Ö","×":"×","Ø":"Ø","Ù":"Ù","Ú":"Ú","Û":"Û","Ü":"Ü","Ý":"Ý","Þ":"Þ","ß":"ß","à":"à","á":"á","â":"â","ã":"ã","ä":"ä","å":"å","æ":"æ","ç":"ç","è":"è","é":"é","ê":"ê","ë":"ë","ì":"ì","í":"í","î":"î","ï":"ï","ð":"ð","ñ":"ñ","ò":"ò","ó":"ó","ô":"ô","õ":"õ","ö":"ö","÷":"÷","ø":"ø","ù":"ù","ú":"ú","û":"û","ü":"ü","ý":"ý","þ":"þ","ÿ":"ÿ",'"':""","&":"&","<":"<",">":">","Œ":"Œ","œ":"œ","Š":"Š","š":"š","Ÿ":"Ÿ","ˆ":"ˆ","˜":"˜"," ":" "," ":" "," ":" ","‌":"‌","‍":"‍","‎":"‎","‏":"‏","–":"–","—":"—","‘":"‘","’":"’","‚":"‚","“":"“","”":"”","„":"„","†":"†","‡":"‡","‰":"‰","‹":"‹","›":"›","€":"€","ƒ":"ƒ","Α":"Α","Β":"Β","Γ":"Γ","Δ":"Δ","Ε":"Ε","Ζ":"Ζ","Η":"Η","Θ":"Θ","Ι":"Ι","Κ":"Κ","Λ":"Λ","Μ":"Μ","Ν":"Ν","Ξ":"Ξ","Ο":"Ο","Π":"Π","Ρ":"Ρ","Σ":"Σ","Τ":"Τ","Υ":"Υ","Φ":"Φ","Χ":"Χ","Ψ":"Ψ","Ω":"Ω","α":"α","β":"β","γ":"γ","δ":"δ","ε":"ε","ζ":"ζ","η":"η","θ":"θ","ι":"ι","κ":"κ","λ":"λ","μ":"μ","ν":"ν","ξ":"ξ","ο":"ο","π":"π","ρ":"ρ","ς":"ς","σ":"σ","τ":"τ","υ":"υ","φ":"φ","χ":"χ","ψ":"ψ","ω":"ω","ϑ":"ϑ","ϒ":"ϒ","ϖ":"ϖ","•":"•","…":"…","′":"′","″":"″","‾":"‾","⁄":"⁄","℘":"℘","ℑ":"ℑ","ℜ":"ℜ","™":"™","ℵ":"ℵ","←":"←","↑":"↑","→":"→","↓":"↓","↔":"↔","↵":"↵","⇐":"⇐","⇑":"⇑","⇒":"⇒","⇓":"⇓","⇔":"⇔","∀":"∀","∂":"∂","∃":"∃","∅":"∅","∇":"∇","∈":"∈","∉":"∉","∋":"∋","∏":"∏","∑":"∑","−":"−","∗":"∗","√":"√","∝":"∝","∞":"∞","∠":"∠","∧":"∧","∨":"∨","∩":"∩","∪":"∪","∫":"∫","∴":"∴","∼":"∼","≅":"≅","≈":"≈","≠":"≠","≡":"≡","≤":"≤","≥":"≥","⊂":"⊂","⊃":"⊃","⊄":"⊄","⊆":"⊆","⊇":"⊇","⊕":"⊕","⊗":"⊗","⊥":"⊥","⋅":"⋅","⌈":"⌈","⌉":"⌉","⌊":"⌊","⌋":"⌋","〈":"⟨","〉":"⟩","◊":"◊","♠":"♠","♣":"♣","♥":"♥","♦":"♦"}},html5:{entities:{"Æ":"Æ","Æ":"Æ","&":"&","&":"&","Á":"Á","Á":"Á","Ă":"Ă","Â":"Â","Â":"Â","А":"А","𝔄":"𝔄","À":"À","À":"À","Α":"Α","Ā":"Ā","⩓":"⩓","Ą":"Ą","𝔸":"𝔸","⁡":"⁡","Å":"Å","Å":"Å","𝒜":"𝒜","≔":"≔","Ã":"Ã","Ã":"Ã","Ä":"Ä","Ä":"Ä","∖":"∖","⫧":"⫧","⌆":"⌆","Б":"Б","∵":"∵","ℬ":"ℬ","Β":"Β","𝔅":"𝔅","𝔹":"𝔹","˘":"˘","ℬ":"ℬ","≎":"≎","Ч":"Ч","©":"©","©":"©","Ć":"Ć","⋒":"⋒","ⅅ":"ⅅ","ℭ":"ℭ","Č":"Č","Ç":"Ç","Ç":"Ç","Ĉ":"Ĉ","∰":"∰","Ċ":"Ċ","¸":"¸","·":"·","ℭ":"ℭ","Χ":"Χ","⊙":"⊙","⊖":"⊖","⊕":"⊕","⊗":"⊗","∲":"∲","”":"”","’":"’","∷":"∷","⩴":"⩴","≡":"≡","∯":"∯","∮":"∮","ℂ":"ℂ","∐":"∐","∳":"∳","⨯":"⨯","𝒞":"𝒞","⋓":"⋓","≍":"≍","ⅅ":"ⅅ","⤑":"⤑","Ђ":"Ђ","Ѕ":"Ѕ","Џ":"Џ","‡":"‡","↡":"↡","⫤":"⫤","Ď":"Ď","Д":"Д","∇":"∇","Δ":"Δ","𝔇":"𝔇","´":"´","˙":"˙","˝":"˝","`":"`","˜":"˜","⋄":"⋄","ⅆ":"ⅆ","𝔻":"𝔻","¨":"¨","⃜":"⃜","≐":"≐","∯":"∯","¨":"¨","⇓":"⇓","⇐":"⇐","⇔":"⇔","⫤":"⫤","⟸":"⟸","⟺":"⟺","⟹":"⟹","⇒":"⇒","⊨":"⊨","⇑":"⇑","⇕":"⇕","∥":"∥","↓":"↓","⤓":"⤓","⇵":"⇵","̑":"̑","⥐":"⥐","⥞":"⥞","↽":"↽","⥖":"⥖","⥟":"⥟","⇁":"⇁","⥗":"⥗","⊤":"⊤","↧":"↧","⇓":"⇓","𝒟":"𝒟","Đ":"Đ","Ŋ":"Ŋ","Ð":"Ð","Ð":"Ð","É":"É","É":"É","Ě":"Ě","Ê":"Ê","Ê":"Ê","Э":"Э","Ė":"Ė","𝔈":"𝔈","È":"È","È":"È","∈":"∈","Ē":"Ē","◻":"◻","▫":"▫","Ę":"Ę","𝔼":"𝔼","Ε":"Ε","⩵":"⩵","≂":"≂","⇌":"⇌","ℰ":"ℰ","⩳":"⩳","Η":"Η","Ë":"Ë","Ë":"Ë","∃":"∃","ⅇ":"ⅇ","Ф":"Ф","𝔉":"𝔉","◼":"◼","▪":"▪","𝔽":"𝔽","∀":"∀","ℱ":"ℱ","ℱ":"ℱ","Ѓ":"Ѓ",">":">",">":">","Γ":"Γ","Ϝ":"Ϝ","Ğ":"Ğ","Ģ":"Ģ","Ĝ":"Ĝ","Г":"Г","Ġ":"Ġ","𝔊":"𝔊","⋙":"⋙","𝔾":"𝔾","≥":"≥","⋛":"⋛","≧":"≧","⪢":"⪢","≷":"≷","⩾":"⩾","≳":"≳","𝒢":"𝒢","≫":"≫","Ъ":"Ъ","ˇ":"ˇ
","^":"^","Ĥ":"Ĥ","ℌ":"ℌ","ℋ":"ℋ","ℍ":"ℍ","─":"─","ℋ":"ℋ","Ħ":"Ħ","≎":"≎","≏":"≏","Е":"Е","IJ":"IJ","Ё":"Ё","Í":"Í","Í":"Í","Î":"Î","Î":"Î","И":"И","İ":"İ","ℑ":"ℑ","Ì":"Ì","Ì":"Ì","ℑ":"ℑ","Ī":"Ī","ⅈ":"ⅈ","⇒":"⇒","∬":"∬","∫":"∫","⋂":"⋂","⁣":"⁣","⁢":"⁢","Į":"Į","𝕀":"𝕀","Ι":"Ι","ℐ":"ℐ","Ĩ":"Ĩ","І":"І","Ï":"Ï","Ï":"Ï","Ĵ":"Ĵ","Й":"Й","𝔍":"𝔍","𝕁":"𝕁","𝒥":"𝒥","Ј":"Ј","Є":"Є","Х":"Х","Ќ":"Ќ","Κ":"Κ","Ķ":"Ķ","К":"К","𝔎":"𝔎","𝕂":"𝕂","𝒦":"𝒦","Љ":"Љ","<":"<","<":"<","Ĺ":"Ĺ","Λ":"Λ","⟪":"⟪","ℒ":"ℒ","↞":"↞","Ľ":"Ľ","Ļ":"Ļ","Л":"Л","⟨":"⟨","←":"←","⇤":"⇤","⇆":"⇆","⌈":"⌈","⟦":"⟦","⥡":"⥡","⇃":"⇃","⥙":"⥙","⌊":"⌊","↔":"↔","⥎":"⥎","⊣":"⊣","↤":"↤","⥚":"⥚","⊲":"⊲","⧏":"⧏","⊴":"⊴","⥑":"⥑","⥠":"⥠","↿":"↿","⥘":"⥘","↼":"↼","⥒":"⥒","⇐":"⇐","⇔":"⇔","⋚":"⋚","≦":"≦","≶":"≶","⪡":"⪡","⩽":"⩽","≲":"≲","𝔏":"𝔏","⋘":"⋘","⇚":"⇚","Ŀ":"Ŀ","⟵":"⟵","⟷":"⟷","⟶":"⟶","⟸":"⟸","⟺":"⟺","⟹":"⟹","𝕃":"𝕃","↙":"↙","↘":"↘","ℒ":"ℒ","↰":"↰","Ł":"Ł","≪":"≪","⤅":"⤅","М":"М"," ":" ","ℳ":"ℳ","𝔐":"𝔐","∓":"∓","𝕄":"𝕄","ℳ":"ℳ","Μ":"Μ","Њ":"Њ","Ń":"Ń","Ň":"Ň","Ņ":"Ņ","Н":"Н","​":"​","​":"​","​":"​","​":"​","≫":"≫","≪":"≪"," ":"\n","𝔑":"𝔑","⁠":"⁠"," ":" ","ℕ":"ℕ","⫬":"⫬","≢":"≢","≭":"≭","∦":"∦","∉":"∉","≠":"≠","≂̸":"≂̸","∄":"∄","≯":"≯","≱":"≱","≧̸":"≧̸","≫̸":"≫̸","≹":"≹","⩾̸":"⩾̸","≵":"≵","≎̸":"≎̸","≏̸":"≏̸","⋪":"⋪","⧏̸":"⧏̸","⋬":"⋬","≮":"≮","≰":"≰","≸":"≸","≪̸":"≪̸","⩽̸":"⩽̸","≴":"≴","⪢̸":"⪢̸","⪡̸":"⪡̸","⊀":"⊀","⪯̸":"⪯̸","⋠":"⋠","∌":"∌","⋫":"⋫","⧐̸":"⧐̸","⋭":"⋭","⊏̸":"⊏̸","⋢":"⋢","⊐̸":"⊐̸","⋣":"⋣","⊂⃒":"⊂⃒","⊈":"⊈","⊁":"⊁","⪰̸":"⪰̸","⋡":"⋡","≿̸":"≿̸","⊃⃒":"⊃⃒","⊉":"⊉","≁":"≁","≄":"≄","≇":"≇","≉":"≉","∤":"∤","𝒩":"𝒩","Ñ":"Ñ","Ñ":"Ñ","Ν":"Ν","Œ":"Œ","Ó":"Ó","Ó":"Ó","Ô":"Ô","Ô":"Ô","О":"О","Ő":"Ő","𝔒":"𝔒","Ò":"Ò","Ò":"Ò","Ō":"Ō","Ω":"Ω","Ο":"Ο","𝕆":"𝕆","“":"“","‘":"‘","⩔":"⩔","𝒪":"𝒪","Ø":"Ø","Ø":"Ø","Õ":"Õ","Õ":"Õ","⨷":"⨷","Ö":"Ö","Ö":"Ö","‾":"‾","⏞":"⏞","⎴":"⎴","⏜":"⏜","∂":"∂","П":"П","𝔓":"𝔓","Φ":"Φ","Π":"Π","±":"±","ℌ":"ℌ","ℙ":"ℙ","⪻":"⪻","≺":"≺","⪯":"⪯","≼":"≼","≾":"≾","″":"″","∏":"∏","∷":"∷","∝":"∝","𝒫":"𝒫","Ψ":"Ψ",""":'"',""":'"',"𝔔":"𝔔","ℚ":"ℚ","𝒬":"𝒬","⤐":"⤐","®":"®","®":"®","Ŕ":"Ŕ","⟫":"⟫","↠":"↠","⤖":"⤖","Ř":"Ř","Ŗ":"Ŗ","Р":"Р","ℜ":"ℜ","∋":"∋","⇋":"⇋","⥯":"⥯","ℜ":"ℜ","Ρ":"Ρ","⟩":"⟩","→":"→","⇥":"⇥","⇄":"⇄","⌉":"⌉","⟧":"⟧","⥝":"⥝","⇂":"⇂","⥕":"⥕","⌋":"⌋","⊢":"⊢","↦":"↦","⥛":"⥛","⊳":"⊳","⧐":"⧐","⊵":"⊵","⥏":"⥏","⥜":"⥜","↾":"↾","⥔":"⥔","⇀":"⇀","⥓":"⥓","⇒":"⇒","ℝ":"ℝ","⥰":"⥰","⇛":"⇛","ℛ":"ℛ","↱":"↱","⧴":"⧴","Щ":"Щ","Ш":"Ш","Ь":"Ь","Ś":"Ś","⪼":"⪼","Š":"Š","Ş":"Ş","Ŝ":"Ŝ","С":"С","𝔖":"𝔖","↓":"↓","←":"←","→":"→","↑":"↑","Σ":"Σ","∘":"∘","𝕊":"𝕊","√":"√","□":"□","⊓":"⊓","⊏":"⊏","⊑":"⊑","⊐":"⊐","⊒":"⊒","⊔":"⊔","𝒮":"𝒮","⋆":"⋆","⋐":"⋐","⋐":"⋐","⊆":"⊆","≻":"≻","⪰":"⪰","≽":"≽","≿":"≿","∋":"∋","∑":"∑","⋑":"⋑","⊃":"⊃","⊇":"⊇","⋑":"⋑","Þ":"Þ","Þ":"Þ","™":"™","Ћ":"Ћ","Ц":"Ц"," ":"\t","Τ":"Τ","Ť":"Ť","Ţ":"Ţ","Т":"Т","𝔗":"𝔗","∴":"∴","Θ":"Θ","  ":"  "," ":" ","∼":"∼","≃":"≃","≅":"≅","≈":"≈","𝕋":"𝕋","⃛":"⃛","𝒯":"𝒯","Ŧ":"Ŧ","Ú":"Ú","Ú":"Ú","↟":"↟","⥉":"⥉","Ў":"Ў","Ŭ":"Ŭ","Û":"Û","Û":"Û","У":"У","Ű":"Ű","𝔘":"𝔘","Ù":"Ù","Ù":"Ù","Ū":"Ū","_":"_","⏟":"⏟","⎵":"⎵","⏝":"⏝","⋃":"⋃","⊎":"⊎","Ų":"Ų","𝕌":"𝕌","↑":"↑","⤒":"⤒","⇅":"⇅","↕":"↕","⥮":"⥮","⊥":"⊥","↥":"↥","⇑":"⇑","⇕":"⇕","↖":"↖","↗":"↗","ϒ":"ϒ","Υ":"Υ","Ů":"Ů","𝒰":"𝒰","Ũ":"Ũ","Ü":"Ü","Ü":"Ü","⊫":"⊫","⫫":"⫫","В":"В","⊩":"⊩","⫦":"⫦","⋁":"⋁","‖":"‖","‖":"‖","∣":"∣","|":"|","❘":"❘","≀":"≀"," ":" 
","𝔙":"𝔙","𝕍":"𝕍","𝒱":"𝒱","⊪":"⊪","Ŵ":"Ŵ","⋀":"⋀","𝔚":"𝔚","𝕎":"𝕎","𝒲":"𝒲","𝔛":"𝔛","Ξ":"Ξ","𝕏":"𝕏","𝒳":"𝒳","Я":"Я","Ї":"Ї","Ю":"Ю","Ý":"Ý","Ý":"Ý","Ŷ":"Ŷ","Ы":"Ы","𝔜":"𝔜","𝕐":"𝕐","𝒴":"𝒴","Ÿ":"Ÿ","Ж":"Ж","Ź":"Ź","Ž":"Ž","З":"З","Ż":"Ż","​":"​","Ζ":"Ζ","ℨ":"ℨ","ℤ":"ℤ","𝒵":"𝒵","á":"á","á":"á","ă":"ă","∾":"∾","∾̳":"∾̳","∿":"∿","â":"â","â":"â","´":"´","´":"´","а":"а","æ":"æ","æ":"æ","⁡":"⁡","𝔞":"𝔞","à":"à","à":"à","ℵ":"ℵ","ℵ":"ℵ","α":"α","ā":"ā","⨿":"⨿","&":"&","&":"&","∧":"∧","⩕":"⩕","⩜":"⩜","⩘":"⩘","⩚":"⩚","∠":"∠","⦤":"⦤","∠":"∠","∡":"∡","⦨":"⦨","⦩":"⦩","⦪":"⦪","⦫":"⦫","⦬":"⦬","⦭":"⦭","⦮":"⦮","⦯":"⦯","∟":"∟","⊾":"⊾","⦝":"⦝","∢":"∢","Å":"Å","⍼":"⍼","ą":"ą","𝕒":"𝕒","≈":"≈","⩰":"⩰","⩯":"⩯","≊":"≊","≋":"≋","'":"'","≈":"≈","≊":"≊","å":"å","å":"å","𝒶":"𝒶","*":"*","≈":"≈","≍":"≍","ã":"ã","ã":"ã","ä":"ä","ä":"ä","∳":"∳","⨑":"⨑","⫭":"⫭","≌":"≌","϶":"϶","‵":"‵","∽":"∽","⋍":"⋍","⊽":"⊽","⌅":"⌅","⌅":"⌅","⎵":"⎵","⎶":"⎶","≌":"≌","б":"б","„":"„","∵":"∵","∵":"∵","⦰":"⦰","϶":"϶","ℬ":"ℬ","β":"β","ℶ":"ℶ","≬":"≬","𝔟":"𝔟","⋂":"⋂","◯":"◯","⋃":"⋃","⨀":"⨀","⨁":"⨁","⨂":"⨂","⨆":"⨆","★":"★","▽":"▽","△":"△","⨄":"⨄","⋁":"⋁","⋀":"⋀","⤍":"⤍","⧫":"⧫","▪":"▪","▴":"▴","▾":"▾","◂":"◂","▸":"▸","␣":"␣","▒":"▒","░":"░","▓":"▓","█":"█","=⃥":"=⃥","≡⃥":"≡⃥","⌐":"⌐","𝕓":"𝕓","⊥":"⊥","⊥":"⊥","⋈":"⋈","╗":"╗","╔":"╔","╖":"╖","╓":"╓","═":"═","╦":"╦","╩":"╩","╤":"╤","╧":"╧","╝":"╝","╚":"╚","╜":"╜","╙":"╙","║":"║","╬":"╬","╣":"╣","╠":"╠","╫":"╫","╢":"╢","╟":"╟","⧉":"⧉","╕":"╕","╒":"╒","┐":"┐","┌":"┌","─":"─","╥":"╥","╨":"╨","┬":"┬","┴":"┴","⊟":"⊟","⊞":"⊞","⊠":"⊠","╛":"╛","╘":"╘","┘":"┘","└":"└","│":"│","╪":"╪","╡":"╡","╞":"╞","┼":"┼","┤":"┤","├":"├","‵":"‵","˘":"˘","¦":"¦","¦":"¦","𝒷":"𝒷","⁏":"⁏","∽":"∽","⋍":"⋍","\":"\\","⧅":"⧅","⟈":"⟈","•":"•","•":"•","≎":"≎","⪮":"⪮","≏":"≏","≏":"≏","ć":"ć","∩":"∩","⩄":"⩄","⩉":"⩉","⩋":"⩋","⩇":"⩇","⩀":"⩀","∩︀":"∩︀","⁁":"⁁","ˇ":"ˇ","⩍":"⩍","č":"č","ç":"ç","ç":"ç","ĉ":"ĉ","⩌":"⩌","⩐":"⩐","ċ":"ċ","¸":"¸","¸":"¸","⦲":"⦲","¢":"¢","¢":"¢","·":"·","𝔠":"𝔠","ч":"ч","✓":"✓","✓":"✓","χ":"χ","○":"○","⧃":"⧃","ˆ":"ˆ","≗":"≗","↺":"↺","↻":"↻","®":"®","Ⓢ":"Ⓢ","⊛":"⊛","⊚":"⊚","⊝":"⊝","≗":"≗","⨐":"⨐","⫯":"⫯","⧂":"⧂","♣":"♣","♣":"♣",":":":","≔":"≔","≔":"≔",",":",","@":"@","∁":"∁","∘":"∘","∁":"∁","ℂ":"ℂ","≅":"≅","⩭":"⩭","∮":"∮","𝕔":"𝕔","∐":"∐","©":"©","©":"©","℗":"℗","↵":"↵","✗":"✗","𝒸":"𝒸","⫏":"⫏","⫑":"⫑","⫐":"⫐","⫒":"⫒","⋯":"⋯","⤸":"⤸","⤵":"⤵","⋞":"⋞","⋟":"⋟","↶":"↶","⤽":"⤽","∪":"∪","⩈":"⩈","⩆":"⩆","⩊":"⩊","⊍":"⊍","⩅":"⩅","∪︀":"∪︀","↷":"↷","⤼":"⤼","⋞":"⋞","⋟":"⋟","⋎":"⋎","⋏":"⋏","¤":"¤","¤":"¤","↶":"↶","↷":"↷","⋎":"⋎","⋏":"⋏","∲":"∲","∱":"∱","⌭":"⌭","⇓":"⇓","⥥":"⥥","†":"†","ℸ":"ℸ","↓":"↓","‐":"‐","⊣":"⊣","⤏":"⤏","˝":"˝","ď":"ď","д":"д","ⅆ":"ⅆ","‡":"‡","⇊":"⇊","⩷":"⩷","°":"°","°":"°","δ":"δ","⦱":"⦱","⥿":"⥿","𝔡":"𝔡","⇃":"⇃","⇂":"⇂","⋄":"⋄","⋄":"⋄","♦":"♦","♦":"♦","¨":"¨","ϝ":"ϝ","⋲":"⋲","÷":"÷","÷":"÷","÷":"÷","⋇":"⋇","⋇":"⋇","ђ":"ђ","⌞":"⌞","⌍":"⌍","$":"$","𝕕":"𝕕","˙":"˙","≐":"≐","≑":"≑","∸":"∸","∔":"∔","⊡":"⊡","⌆":"⌆","↓":"↓","⇊":"⇊","⇃":"⇃","⇂":"⇂","⤐":"⤐","⌟":"⌟","⌌":"⌌","𝒹":"𝒹","ѕ":"ѕ","⧶":"⧶","đ":"đ","⋱":"⋱","▿":"▿","▾":"▾","⇵":"⇵","⥯":"⥯","⦦":"⦦","џ":"џ","⟿":"⟿","⩷":"⩷","≑":"≑","é":"é","é":"é","⩮":"⩮","ě":"ě","≖":"≖","ê":"ê","ê":"ê","≕":"≕","э":"э","ė":"ė","ⅇ":"ⅇ","≒":"≒","𝔢":"𝔢","⪚":"⪚","è":"è","è":"è","⪖":"⪖","⪘":"⪘","⪙":"⪙","⏧":"⏧","ℓ":"ℓ","⪕":"⪕","⪗":"⪗","ē":"ē","∅":"∅","∅":"∅","∅":"∅"," ":" "," ":" "," ":" ","ŋ":"ŋ"," ":" 
","ę":"ę","𝕖":"𝕖","⋕":"⋕","⧣":"⧣","⩱":"⩱","ε":"ε","ε":"ε","ϵ":"ϵ","≖":"≖","≕":"≕","≂":"≂","⪖":"⪖","⪕":"⪕","=":"=","≟":"≟","≡":"≡","⩸":"⩸","⧥":"⧥","≓":"≓","⥱":"⥱","ℯ":"ℯ","≐":"≐","≂":"≂","η":"η","ð":"ð","ð":"ð","ë":"ë","ë":"ë","€":"€","!":"!","∃":"∃","ℰ":"ℰ","ⅇ":"ⅇ","≒":"≒","ф":"ф","♀":"♀","ffi":"ffi","ff":"ff","ffl":"ffl","𝔣":"𝔣","fi":"fi","fj":"fj","♭":"♭","fl":"fl","▱":"▱","ƒ":"ƒ","𝕗":"𝕗","∀":"∀","⋔":"⋔","⫙":"⫙","⨍":"⨍","½":"½","½":"½","⅓":"⅓","¼":"¼","¼":"¼","⅕":"⅕","⅙":"⅙","⅛":"⅛","⅔":"⅔","⅖":"⅖","¾":"¾","¾":"¾","⅗":"⅗","⅜":"⅜","⅘":"⅘","⅚":"⅚","⅝":"⅝","⅞":"⅞","⁄":"⁄","⌢":"⌢","𝒻":"𝒻","≧":"≧","⪌":"⪌","ǵ":"ǵ","γ":"γ","ϝ":"ϝ","⪆":"⪆","ğ":"ğ","ĝ":"ĝ","г":"г","ġ":"ġ","≥":"≥","⋛":"⋛","≥":"≥","≧":"≧","⩾":"⩾","⩾":"⩾","⪩":"⪩","⪀":"⪀","⪂":"⪂","⪄":"⪄","⋛︀":"⋛︀","⪔":"⪔","𝔤":"𝔤","≫":"≫","⋙":"⋙","ℷ":"ℷ","ѓ":"ѓ","≷":"≷","⪒":"⪒","⪥":"⪥","⪤":"⪤","≩":"≩","⪊":"⪊","⪊":"⪊","⪈":"⪈","⪈":"⪈","≩":"≩","⋧":"⋧","𝕘":"𝕘","`":"`","ℊ":"ℊ","≳":"≳","⪎":"⪎","⪐":"⪐",">":">",">":">","⪧":"⪧","⩺":"⩺","⋗":"⋗","⦕":"⦕","⩼":"⩼","⪆":"⪆","⥸":"⥸","⋗":"⋗","⋛":"⋛","⪌":"⪌","≷":"≷","≳":"≳","≩︀":"≩︀","≩︀":"≩︀","⇔":"⇔"," ":" ","½":"½","ℋ":"ℋ","ъ":"ъ","↔":"↔","⥈":"⥈","↭":"↭","ℏ":"ℏ","ĥ":"ĥ","♥":"♥","♥":"♥","…":"…","⊹":"⊹","𝔥":"𝔥","⤥":"⤥","⤦":"⤦","⇿":"⇿","∻":"∻","↩":"↩","↪":"↪","𝕙":"𝕙","―":"―","𝒽":"𝒽","ℏ":"ℏ","ħ":"ħ","⁃":"⁃","‐":"‐","í":"í","í":"í","⁣":"⁣","î":"î","î":"î","и":"и","е":"е","¡":"¡","¡":"¡","⇔":"⇔","𝔦":"𝔦","ì":"ì","ì":"ì","ⅈ":"ⅈ","⨌":"⨌","∭":"∭","⧜":"⧜","℩":"℩","ij":"ij","ī":"ī","ℑ":"ℑ","ℐ":"ℐ","ℑ":"ℑ","ı":"ı","⊷":"⊷","Ƶ":"Ƶ","∈":"∈","℅":"℅","∞":"∞","⧝":"⧝","ı":"ı","∫":"∫","⊺":"⊺","ℤ":"ℤ","⊺":"⊺","⨗":"⨗","⨼":"⨼","ё":"ё","į":"į","𝕚":"𝕚","ι":"ι","⨼":"⨼","¿":"¿","¿":"¿","𝒾":"𝒾","∈":"∈","⋹":"⋹","⋵":"⋵","⋴":"⋴","⋳":"⋳","∈":"∈","⁢":"⁢","ĩ":"ĩ","і":"і","ï":"ï","ï":"ï","ĵ":"ĵ","й":"й","𝔧":"𝔧","ȷ":"ȷ","𝕛":"𝕛","𝒿":"𝒿","ј":"ј","є":"є","κ":"κ","ϰ":"ϰ","ķ":"ķ","к":"к","𝔨":"𝔨","ĸ":"ĸ","х":"х","ќ":"ќ","𝕜":"𝕜","𝓀":"𝓀","⇚":"⇚","⇐":"⇐","⤛":"⤛","⤎":"⤎","≦":"≦","⪋":"⪋","⥢":"⥢","ĺ":"ĺ","⦴":"⦴","ℒ":"ℒ","λ":"λ","⟨":"⟨","⦑":"⦑","⟨":"⟨","⪅":"⪅","«":"«","«":"«","←":"←","⇤":"⇤","⤟":"⤟","⤝":"⤝","↩":"↩","↫":"↫","⤹":"⤹","⥳":"⥳","↢":"↢","⪫":"⪫","⤙":"⤙","⪭":"⪭","⪭︀":"⪭︀","⤌":"⤌","❲":"❲","{":"{","[":"[","⦋":"⦋","⦏":"⦏","⦍":"⦍","ľ":"ľ","ļ":"ļ","⌈":"⌈","{":"{","л":"л","⤶":"⤶","“":"“","„":"„","⥧":"⥧","⥋":"⥋","↲":"↲","≤":"≤","←":"←","↢":"↢","↽":"↽","↼":"↼","⇇":"⇇","↔":"↔","⇆":"⇆","⇋":"⇋","↭":"↭","⋋":"⋋","⋚":"⋚","≤":"≤","≦":"≦","⩽":"⩽","⩽":"⩽","⪨":"⪨","⩿":"⩿","⪁":"⪁","⪃":"⪃","⋚︀":"⋚︀","⪓":"⪓","⪅":"⪅","⋖":"⋖","⋚":"⋚","⪋":"⪋","≶":"≶","≲":"≲","⥼":"⥼","⌊":"⌊","𝔩":"𝔩","≶":"≶","⪑":"⪑","↽":"↽","↼":"↼","⥪":"⥪","▄":"▄","љ":"љ","≪":"≪","⇇":"⇇","⌞":"⌞","⥫":"⥫","◺":"◺","ŀ":"ŀ","⎰":"⎰","⎰":"⎰","≨":"≨","⪉":"⪉","⪉":"⪉","⪇":"⪇","⪇":"⪇","≨":"≨","⋦":"⋦","⟬":"⟬","⇽":"⇽","⟦":"⟦","⟵":"⟵","⟷":"⟷","⟼":"⟼","⟶":"⟶","↫":"↫","↬":"↬","⦅":"⦅","𝕝":"𝕝","⨭":"⨭","⨴":"⨴","∗":"∗","_":"_","◊":"◊","◊":"◊","⧫":"⧫","(":"(","⦓":"⦓","⇆":"⇆","⌟":"⌟","⇋":"⇋","⥭":"⥭","‎":"‎","⊿":"⊿","‹":"‹","𝓁":"𝓁","↰":"↰","≲":"≲","⪍":"⪍","⪏":"⪏","[":"[","‘":"‘","‚":"‚","ł":"ł","<":"<","<":"<","⪦":"⪦","⩹":"⩹","⋖":"⋖","⋋":"⋋","⋉":"⋉","⥶":"⥶","⩻":"⩻","⦖":"⦖","◃":"◃","⊴":"⊴","◂":"◂","⥊":"⥊","⥦":"⥦","≨︀":"≨︀","≨︀":"≨︀","∺":"∺","¯":"¯","¯":"¯","♂":"♂","✠":"✠","✠":"✠","↦":"↦","↦":"↦","↧":"↧","↤":"↤","↥":"↥","▮":"▮","⨩":"⨩","м":"м","—":"—","∡":"∡","𝔪":"𝔪","℧":"℧","µ":"µ","µ":"µ","∣":"∣","*":"*","⫰":"⫰","·":"·","·":"·","−":"−","⊟":"⊟","∸":"∸","⨪":"⨪","⫛":"⫛","…":"…","∓":"∓","⊧":"⊧","𝕞":"𝕞","∓":"∓","𝓂":"𝓂","∾":"∾","μ":"μ","⊸":"⊸","⊸":"⊸","⋙̸":"⋙̸","≫⃒":"≫⃒","≫̸":"≫̸","⇍":"⇍","⇎":"⇎","⋘̸":"⋘̸","≪⃒":"≪⃒","≪̸":"≪̸","⇏":"⇏","⊯":"⊯","⊮":"
⊮","∇":"∇","ń":"ń","∠⃒":"∠⃒","≉":"≉","⩰̸":"⩰̸","≋̸":"≋̸","ʼn":"ʼn","≉":"≉","♮":"♮","♮":"♮","ℕ":"ℕ"," ":" "," ":" ","≎̸":"≎̸","≏̸":"≏̸","⩃":"⩃","ň":"ň","ņ":"ņ","≇":"≇","⩭̸":"⩭̸","⩂":"⩂","н":"н","–":"–","≠":"≠","⇗":"⇗","⤤":"⤤","↗":"↗","↗":"↗","≐̸":"≐̸","≢":"≢","⤨":"⤨","≂̸":"≂̸","∄":"∄","∄":"∄","𝔫":"𝔫","≧̸":"≧̸","≱":"≱","≱":"≱","≧̸":"≧̸","⩾̸":"⩾̸","⩾̸":"⩾̸","≵":"≵","≯":"≯","≯":"≯","⇎":"⇎","↮":"↮","⫲":"⫲","∋":"∋","⋼":"⋼","⋺":"⋺","∋":"∋","њ":"њ","⇍":"⇍","≦̸":"≦̸","↚":"↚","‥":"‥","≰":"≰","↚":"↚","↮":"↮","≰":"≰","≦̸":"≦̸","⩽̸":"⩽̸","⩽̸":"⩽̸","≮":"≮","≴":"≴","≮":"≮","⋪":"⋪","⋬":"⋬","∤":"∤","𝕟":"𝕟","¬":"¬","¬":"¬","∉":"∉","⋹̸":"⋹̸","⋵̸":"⋵̸","∉":"∉","⋷":"⋷","⋶":"⋶","∌":"∌","∌":"∌","⋾":"⋾","⋽":"⋽","∦":"∦","∦":"∦","⫽⃥":"⫽⃥","∂̸":"∂̸","⨔":"⨔","⊀":"⊀","⋠":"⋠","⪯̸":"⪯̸","⊀":"⊀","⪯̸":"⪯̸","⇏":"⇏","↛":"↛","⤳̸":"⤳̸","↝̸":"↝̸","↛":"↛","⋫":"⋫","⋭":"⋭","⊁":"⊁","⋡":"⋡","⪰̸":"⪰̸","𝓃":"𝓃","∤":"∤","∦":"∦","≁":"≁","≄":"≄","≄":"≄","∤":"∤","∦":"∦","⋢":"⋢","⋣":"⋣","⊄":"⊄","⫅̸":"⫅̸","⊈":"⊈","⊂⃒":"⊂⃒","⊈":"⊈","⫅̸":"⫅̸","⊁":"⊁","⪰̸":"⪰̸","⊅":"⊅","⫆̸":"⫆̸","⊉":"⊉","⊃⃒":"⊃⃒","⊉":"⊉","⫆̸":"⫆̸","≹":"≹","ñ":"ñ","ñ":"ñ","≸":"≸","⋪":"⋪","⋬":"⋬","⋫":"⋫","⋭":"⋭","ν":"ν","#":"#","№":"№"," ":" ","⊭":"⊭","⤄":"⤄","≍⃒":"≍⃒","⊬":"⊬","≥⃒":"≥⃒",">⃒":">⃒","⧞":"⧞","⤂":"⤂","≤⃒":"≤⃒","<⃒":"<⃒","⊴⃒":"⊴⃒","⤃":"⤃","⊵⃒":"⊵⃒","∼⃒":"∼⃒","⇖":"⇖","⤣":"⤣","↖":"↖","↖":"↖","⤧":"⤧","Ⓢ":"Ⓢ","ó":"ó","ó":"ó","⊛":"⊛","⊚":"⊚","ô":"ô","ô":"ô","о":"о","⊝":"⊝","ő":"ő","⨸":"⨸","⊙":"⊙","⦼":"⦼","œ":"œ","⦿":"⦿","𝔬":"𝔬","˛":"˛","ò":"ò","ò":"ò","⧁":"⧁","⦵":"⦵","Ω":"Ω","∮":"∮","↺":"↺","⦾":"⦾","⦻":"⦻","‾":"‾","⧀":"⧀","ō":"ō","ω":"ω","ο":"ο","⦶":"⦶","⊖":"⊖","𝕠":"𝕠","⦷":"⦷","⦹":"⦹","⊕":"⊕","∨":"∨","↻":"↻","⩝":"⩝","ℴ":"ℴ","ℴ":"ℴ","ª":"ª","ª":"ª","º":"º","º":"º","⊶":"⊶","⩖":"⩖","⩗":"⩗","⩛":"⩛","ℴ":"ℴ","ø":"ø","ø":"ø","⊘":"⊘","õ":"õ","õ":"õ","⊗":"⊗","⨶":"⨶","ö":"ö","ö":"ö","⌽":"⌽","∥":"∥","¶":"¶","¶":"¶","∥":"∥","⫳":"⫳","⫽":"⫽","∂":"∂","п":"п","%":"%",".":".","‰":"‰","⊥":"⊥","‱":"‱","𝔭":"𝔭","φ":"φ","ϕ":"ϕ","ℳ":"ℳ","☎":"☎","π":"π","⋔":"⋔","ϖ":"ϖ","ℏ":"ℏ","ℎ":"ℎ","ℏ":"ℏ","+":"+","⨣":"⨣","⊞":"⊞","⨢":"⨢","∔":"∔","⨥":"⨥","⩲":"⩲","±":"±","±":"±","⨦":"⨦","⨧":"⨧","±":"±","⨕":"⨕","𝕡":"𝕡","£":"£","£":"£","≺":"≺","⪳":"⪳","⪷":"⪷","≼":"≼","⪯":"⪯","≺":"≺","⪷":"⪷","≼":"≼","⪯":"⪯","⪹":"⪹","⪵":"⪵","⋨":"⋨","≾":"≾","′":"′","ℙ":"ℙ","⪵":"⪵","⪹":"⪹","⋨":"⋨","∏":"∏","⌮":"⌮","⌒":"⌒","⌓":"⌓","∝":"∝","∝":"∝","≾":"≾","⊰":"⊰","𝓅":"𝓅","ψ":"ψ"," ":" 
","𝔮":"𝔮","⨌":"⨌","𝕢":"𝕢","⁗":"⁗","𝓆":"𝓆","ℍ":"ℍ","⨖":"⨖","?":"?","≟":"≟",""":'"',""":'"',"⇛":"⇛","⇒":"⇒","⤜":"⤜","⤏":"⤏","⥤":"⥤","∽̱":"∽̱","ŕ":"ŕ","√":"√","⦳":"⦳","⟩":"⟩","⦒":"⦒","⦥":"⦥","⟩":"⟩","»":"»","»":"»","→":"→","⥵":"⥵","⇥":"⇥","⤠":"⤠","⤳":"⤳","⤞":"⤞","↪":"↪","↬":"↬","⥅":"⥅","⥴":"⥴","↣":"↣","↝":"↝","⤚":"⤚","∶":"∶","ℚ":"ℚ","⤍":"⤍","❳":"❳","}":"}","]":"]","⦌":"⦌","⦎":"⦎","⦐":"⦐","ř":"ř","ŗ":"ŗ","⌉":"⌉","}":"}","р":"р","⤷":"⤷","⥩":"⥩","”":"”","”":"”","↳":"↳","ℜ":"ℜ","ℛ":"ℛ","ℜ":"ℜ","ℝ":"ℝ","▭":"▭","®":"®","®":"®","⥽":"⥽","⌋":"⌋","𝔯":"𝔯","⇁":"⇁","⇀":"⇀","⥬":"⥬","ρ":"ρ","ϱ":"ϱ","→":"→","↣":"↣","⇁":"⇁","⇀":"⇀","⇄":"⇄","⇌":"⇌","⇉":"⇉","↝":"↝","⋌":"⋌","˚":"˚","≓":"≓","⇄":"⇄","⇌":"⇌","‏":"‏","⎱":"⎱","⎱":"⎱","⫮":"⫮","⟭":"⟭","⇾":"⇾","⟧":"⟧","⦆":"⦆","𝕣":"𝕣","⨮":"⨮","⨵":"⨵",")":")","⦔":"⦔","⨒":"⨒","⇉":"⇉","›":"›","𝓇":"𝓇","↱":"↱","]":"]","’":"’","’":"’","⋌":"⋌","⋊":"⋊","▹":"▹","⊵":"⊵","▸":"▸","⧎":"⧎","⥨":"⥨","℞":"℞","ś":"ś","‚":"‚","≻":"≻","⪴":"⪴","⪸":"⪸","š":"š","≽":"≽","⪰":"⪰","ş":"ş","ŝ":"ŝ","⪶":"⪶","⪺":"⪺","⋩":"⋩","⨓":"⨓","≿":"≿","с":"с","⋅":"⋅","⊡":"⊡","⩦":"⩦","⇘":"⇘","⤥":"⤥","↘":"↘","↘":"↘","§":"§","§":"§",";":";","⤩":"⤩","∖":"∖","∖":"∖","✶":"✶","𝔰":"𝔰","⌢":"⌢","♯":"♯","щ":"щ","ш":"ш","∣":"∣","∥":"∥","­":"­","­":"­","σ":"σ","ς":"ς","ς":"ς","∼":"∼","⩪":"⩪","≃":"≃","≃":"≃","⪞":"⪞","⪠":"⪠","⪝":"⪝","⪟":"⪟","≆":"≆","⨤":"⨤","⥲":"⥲","←":"←","∖":"∖","⨳":"⨳","⧤":"⧤","∣":"∣","⌣":"⌣","⪪":"⪪","⪬":"⪬","⪬︀":"⪬︀","ь":"ь","/":"/","⧄":"⧄","⌿":"⌿","𝕤":"𝕤","♠":"♠","♠":"♠","∥":"∥","⊓":"⊓","⊓︀":"⊓︀","⊔":"⊔","⊔︀":"⊔︀","⊏":"⊏","⊑":"⊑","⊏":"⊏","⊑":"⊑","⊐":"⊐","⊒":"⊒","⊐":"⊐","⊒":"⊒","□":"□","□":"□","▪":"▪","▪":"▪","→":"→","𝓈":"𝓈","∖":"∖","⌣":"⌣","⋆":"⋆","☆":"☆","★":"★","ϵ":"ϵ","ϕ":"ϕ","¯":"¯","⊂":"⊂","⫅":"⫅","⪽":"⪽","⊆":"⊆","⫃":"⫃","⫁":"⫁","⫋":"⫋","⊊":"⊊","⪿":"⪿","⥹":"⥹","⊂":"⊂","⊆":"⊆","⫅":"⫅","⊊":"⊊","⫋":"⫋","⫇":"⫇","⫕":"⫕","⫓":"⫓","≻":"≻","⪸":"⪸","≽":"≽","⪰":"⪰","⪺":"⪺","⪶":"⪶","⋩":"⋩","≿":"≿","∑":"∑","♪":"♪","¹":"¹","¹":"¹","²":"²","²":"²","³":"³","³":"³","⊃":"⊃","⫆":"⫆","⪾":"⪾","⫘":"⫘","⊇":"⊇","⫄":"⫄","⟉":"⟉","⫗":"⫗","⥻":"⥻","⫂":"⫂","⫌":"⫌","⊋":"⊋","⫀":"⫀","⊃":"⊃","⊇":"⊇","⫆":"⫆","⊋":"⊋","⫌":"⫌","⫈":"⫈","⫔":"⫔","⫖":"⫖","⇙":"⇙","⤦":"⤦","↙":"↙","↙":"↙","⤪":"⤪","ß":"ß","ß":"ß","⌖":"⌖","τ":"τ","⎴":"⎴","ť":"ť","ţ":"ţ","т":"т","⃛":"⃛","⌕":"⌕","𝔱":"𝔱","∴":"∴","∴":"∴","θ":"θ","ϑ":"ϑ","ϑ":"ϑ","≈":"≈","∼":"∼"," ":" 
","≈":"≈","∼":"∼","þ":"þ","þ":"þ","˜":"˜","×":"×","×":"×","⊠":"⊠","⨱":"⨱","⨰":"⨰","∭":"∭","⤨":"⤨","⊤":"⊤","⌶":"⌶","⫱":"⫱","𝕥":"𝕥","⫚":"⫚","⤩":"⤩","‴":"‴","™":"™","▵":"▵","▿":"▿","◃":"◃","⊴":"⊴","≜":"≜","▹":"▹","⊵":"⊵","◬":"◬","≜":"≜","⨺":"⨺","⨹":"⨹","⧍":"⧍","⨻":"⨻","⏢":"⏢","𝓉":"𝓉","ц":"ц","ћ":"ћ","ŧ":"ŧ","≬":"≬","↞":"↞","↠":"↠","⇑":"⇑","⥣":"⥣","ú":"ú","ú":"ú","↑":"↑","ў":"ў","ŭ":"ŭ","û":"û","û":"û","у":"у","⇅":"⇅","ű":"ű","⥮":"⥮","⥾":"⥾","𝔲":"𝔲","ù":"ù","ù":"ù","↿":"↿","↾":"↾","▀":"▀","⌜":"⌜","⌜":"⌜","⌏":"⌏","◸":"◸","ū":"ū","¨":"¨","¨":"¨","ų":"ų","𝕦":"𝕦","↑":"↑","↕":"↕","↿":"↿","↾":"↾","⊎":"⊎","υ":"υ","ϒ":"ϒ","υ":"υ","⇈":"⇈","⌝":"⌝","⌝":"⌝","⌎":"⌎","ů":"ů","◹":"◹","𝓊":"𝓊","⋰":"⋰","ũ":"ũ","▵":"▵","▴":"▴","⇈":"⇈","ü":"ü","ü":"ü","⦧":"⦧","⇕":"⇕","⫨":"⫨","⫩":"⫩","⊨":"⊨","⦜":"⦜","ϵ":"ϵ","ϰ":"ϰ","∅":"∅","ϕ":"ϕ","ϖ":"ϖ","∝":"∝","↕":"↕","ϱ":"ϱ","ς":"ς","⊊︀":"⊊︀","⫋︀":"⫋︀","⊋︀":"⊋︀","⫌︀":"⫌︀","ϑ":"ϑ","⊲":"⊲","⊳":"⊳","в":"в","⊢":"⊢","∨":"∨","⊻":"⊻","≚":"≚","⋮":"⋮","|":"|","|":"|","𝔳":"𝔳","⊲":"⊲","⊂⃒":"⊂⃒","⊃⃒":"⊃⃒","𝕧":"𝕧","∝":"∝","⊳":"⊳","𝓋":"𝓋","⫋︀":"⫋︀","⊊︀":"⊊︀","⫌︀":"⫌︀","⊋︀":"⊋︀","⦚":"⦚","ŵ":"ŵ","⩟":"⩟","∧":"∧","≙":"≙","℘":"℘","𝔴":"𝔴","𝕨":"𝕨","℘":"℘","≀":"≀","≀":"≀","𝓌":"𝓌","⋂":"⋂","◯":"◯","⋃":"⋃","▽":"▽","𝔵":"𝔵","⟺":"⟺","⟷":"⟷","ξ":"ξ","⟸":"⟸","⟵":"⟵","⟼":"⟼","⋻":"⋻","⨀":"⨀","𝕩":"𝕩","⨁":"⨁","⨂":"⨂","⟹":"⟹","⟶":"⟶","𝓍":"𝓍","⨆":"⨆","⨄":"⨄","△":"△","⋁":"⋁","⋀":"⋀","ý":"ý","ý":"ý","я":"я","ŷ":"ŷ","ы":"ы","¥":"¥","¥":"¥","𝔶":"𝔶","ї":"ї","𝕪":"𝕪","𝓎":"𝓎","ю":"ю","ÿ":"ÿ","ÿ":"ÿ","ź":"ź","ž":"ž","з":"з","ż":"ż","ℨ":"ℨ","ζ":"ζ","𝔷":"𝔷","ж":"ж","⇝":"⇝","𝕫":"𝕫","𝓏":"𝓏","‍":"‍","‌":"‌"},characters:{"Æ":"Æ","&":"&","Á":"Á","Ă":"Ă","Â":"Â","А":"А","𝔄":"𝔄","À":"À","Α":"Α","Ā":"Ā","⩓":"⩓","Ą":"Ą","𝔸":"𝔸","⁡":"⁡","Å":"Å","𝒜":"𝒜","≔":"≔","Ã":"Ã","Ä":"Ä","∖":"∖","⫧":"⫧","⌆":"⌆","Б":"Б","∵":"∵","ℬ":"ℬ","Β":"Β","𝔅":"𝔅","𝔹":"𝔹","˘":"˘","≎":"≎","Ч":"Ч","©":"©","Ć":"Ć","⋒":"⋒","ⅅ":"ⅅ","ℭ":"ℭ","Č":"Č","Ç":"Ç","Ĉ":"Ĉ","∰":"∰","Ċ":"Ċ","¸":"¸","·":"·","Χ":"Χ","⊙":"⊙","⊖":"⊖","⊕":"⊕","⊗":"⊗","∲":"∲","”":"”","’":"’","∷":"∷","⩴":"⩴","≡":"≡","∯":"∯","∮":"∮","ℂ":"ℂ","∐":"∐","∳":"∳","⨯":"⨯","𝒞":"𝒞","⋓":"⋓","≍":"≍","⤑":"⤑","Ђ":"Ђ","Ѕ":"Ѕ","Џ":"Џ","‡":"‡","↡":"↡","⫤":"⫤","Ď":"Ď","Д":"Д","∇":"∇","Δ":"Δ","𝔇":"𝔇","´":"´","˙":"˙","˝":"˝","`":"`","˜":"˜","⋄":"⋄","ⅆ":"ⅆ","𝔻":"𝔻","¨":"¨","⃜":"⃜","≐":"≐","⇓":"⇓","⇐":"⇐","⇔":"⇔","⟸":"⟸","⟺":"⟺","⟹":"⟹","⇒":"⇒","⊨":"⊨","⇑":"⇑","⇕":"⇕","∥":"∥","↓":"↓","⤓":"⤓","⇵":"⇵","̑":"̑","⥐":"⥐","⥞":"⥞","↽":"↽","⥖":"⥖","⥟":"⥟","⇁":"⇁","⥗":"⥗","⊤":"⊤","↧":"↧","𝒟":"𝒟","Đ":"Đ","Ŋ":"Ŋ","Ð":"Ð","É":"É","Ě":"Ě","Ê":"Ê","Э":"Э","Ė":"Ė","𝔈":"𝔈","È":"È","∈":"∈","Ē":"Ē","◻":"◻","▫":"▫","Ę":"Ę","𝔼":"𝔼","Ε":"Ε","⩵":"⩵","≂":"≂","⇌":"⇌","ℰ":"ℰ","⩳":"⩳","Η":"Η","Ë":"Ë","∃":"∃","ⅇ":"ⅇ","Ф":"Ф","𝔉":"𝔉","◼":"◼","▪":"▪","𝔽":"𝔽","∀":"∀","ℱ":"ℱ","Ѓ":"Ѓ",">":">","Γ":"Γ","Ϝ":"Ϝ","Ğ":"Ğ","Ģ":"Ģ","Ĝ":"Ĝ","Г":"Г","Ġ":"Ġ","𝔊":"𝔊","⋙":"⋙","𝔾":"𝔾","≥":"≥","⋛":"⋛","≧":"≧","⪢":"⪢","≷":"≷","⩾":"⩾","≳":"≳","𝒢":"𝒢","≫":"≫","Ъ":"Ъ","ˇ":"ˇ","^":"^","Ĥ":"Ĥ","ℌ":"ℌ","ℋ":"ℋ","ℍ":"ℍ","─":"─","Ħ":"Ħ","≏":"≏","Е":"Е","IJ":"IJ","Ё":"Ё","Í":"Í","Î":"Î","И":"И","İ":"İ","ℑ":"ℑ","Ì":"Ì","Ī":"Ī","ⅈ":"ⅈ","∬":"∬","∫":"∫","⋂":"⋂","⁣":"⁣","⁢":"⁢","Į":"Į","𝕀":"𝕀","Ι":"Ι","ℐ":"ℐ","Ĩ":"Ĩ","І":"І","Ï":"Ï","Ĵ":"Ĵ","Й":"Й","𝔍":"𝔍","𝕁":"𝕁","𝒥":"𝒥","Ј":"Ј","Є":"Є","Х":"Х","Ќ":"Ќ","Κ":"Κ","Ķ":"Ķ","К":"К","𝔎":"𝔎","𝕂":"𝕂","𝒦":"𝒦","Љ":"Љ","<":"<","Ĺ":"Ĺ","Λ":"Λ","⟪":"⟪","ℒ":"ℒ","↞":"↞","Ľ":"Ľ","Ļ":"Ļ","Л":"Л","⟨":"⟨","←":"←","⇤":"⇤","⇆":"⇆","⌈":"⌈","⟦":"⟦","⥡":"⥡","⇃":"⇃","⥙":"⥙","⌊":"⌊","↔":"↔","⥎":"⥎","⊣":"⊣","↤":"↤","⥚":"⥚","⊲":"⊲","⧏":"⧏","⊴":"⊴","⥑":"⥑","⥠":"⥠
","↿":"↿","⥘":"⥘","↼":"↼","⥒":"⥒","⋚":"⋚","≦":"≦","≶":"≶","⪡":"⪡","⩽":"⩽","≲":"≲","𝔏":"𝔏","⋘":"⋘","⇚":"⇚","Ŀ":"Ŀ","⟵":"⟵","⟷":"⟷","⟶":"⟶","𝕃":"𝕃","↙":"↙","↘":"↘","↰":"↰","Ł":"Ł","≪":"≪","⤅":"⤅","М":"М"," ":" ","ℳ":"ℳ","𝔐":"𝔐","∓":"∓","𝕄":"𝕄","Μ":"Μ","Њ":"Њ","Ń":"Ń","Ň":"Ň","Ņ":"Ņ","Н":"Н","​":"​","\n":" ","𝔑":"𝔑","⁠":"⁠"," ":" ","ℕ":"ℕ","⫬":"⫬","≢":"≢","≭":"≭","∦":"∦","∉":"∉","≠":"≠","≂̸":"≂̸","∄":"∄","≯":"≯","≱":"≱","≧̸":"≧̸","≫̸":"≫̸","≹":"≹","⩾̸":"⩾̸","≵":"≵","≎̸":"≎̸","≏̸":"≏̸","⋪":"⋪","⧏̸":"⧏̸","⋬":"⋬","≮":"≮","≰":"≰","≸":"≸","≪̸":"≪̸","⩽̸":"⩽̸","≴":"≴","⪢̸":"⪢̸","⪡̸":"⪡̸","⊀":"⊀","⪯̸":"⪯̸","⋠":"⋠","∌":"∌","⋫":"⋫","⧐̸":"⧐̸","⋭":"⋭","⊏̸":"⊏̸","⋢":"⋢","⊐̸":"⊐̸","⋣":"⋣","⊂⃒":"⊂⃒","⊈":"⊈","⊁":"⊁","⪰̸":"⪰̸","⋡":"⋡","≿̸":"≿̸","⊃⃒":"⊃⃒","⊉":"⊉","≁":"≁","≄":"≄","≇":"≇","≉":"≉","∤":"∤","𝒩":"𝒩","Ñ":"Ñ","Ν":"Ν","Œ":"Œ","Ó":"Ó","Ô":"Ô","О":"О","Ő":"Ő","𝔒":"𝔒","Ò":"Ò","Ō":"Ō","Ω":"Ω","Ο":"Ο","𝕆":"𝕆","“":"“","‘":"‘","⩔":"⩔","𝒪":"𝒪","Ø":"Ø","Õ":"Õ","⨷":"⨷","Ö":"Ö","‾":"‾","⏞":"⏞","⎴":"⎴","⏜":"⏜","∂":"∂","П":"П","𝔓":"𝔓","Φ":"Φ","Π":"Π","±":"±","ℙ":"ℙ","⪻":"⪻","≺":"≺","⪯":"⪯","≼":"≼","≾":"≾","″":"″","∏":"∏","∝":"∝","𝒫":"𝒫","Ψ":"Ψ",'"':""","𝔔":"𝔔","ℚ":"ℚ","𝒬":"𝒬","⤐":"⤐","®":"®","Ŕ":"Ŕ","⟫":"⟫","↠":"↠","⤖":"⤖","Ř":"Ř","Ŗ":"Ŗ","Р":"Р","ℜ":"ℜ","∋":"∋","⇋":"⇋","⥯":"⥯","Ρ":"Ρ","⟩":"⟩","→":"→","⇥":"⇥","⇄":"⇄","⌉":"⌉","⟧":"⟧","⥝":"⥝","⇂":"⇂","⥕":"⥕","⌋":"⌋","⊢":"⊢","↦":"↦","⥛":"⥛","⊳":"⊳","⧐":"⧐","⊵":"⊵","⥏":"⥏","⥜":"⥜","↾":"↾","⥔":"⥔","⇀":"⇀","⥓":"⥓","ℝ":"ℝ","⥰":"⥰","⇛":"⇛","ℛ":"ℛ","↱":"↱","⧴":"⧴","Щ":"Щ","Ш":"Ш","Ь":"Ь","Ś":"Ś","⪼":"⪼","Š":"Š","Ş":"Ş","Ŝ":"Ŝ","С":"С","𝔖":"𝔖","↑":"↑","Σ":"Σ","∘":"∘","𝕊":"𝕊","√":"√","□":"□","⊓":"⊓","⊏":"⊏","⊑":"⊑","⊐":"⊐","⊒":"⊒","⊔":"⊔","𝒮":"𝒮","⋆":"⋆","⋐":"⋐","⊆":"⊆","≻":"≻","⪰":"⪰","≽":"≽","≿":"≿","∑":"∑","⋑":"⋑","⊃":"⊃","⊇":"⊇","Þ":"Þ","™":"™","Ћ":"Ћ","Ц":"Ц","\t":" ","Τ":"Τ","Ť":"Ť","Ţ":"Ţ","Т":"Т","𝔗":"𝔗","∴":"∴","Θ":"Θ","  ":"  "," ":" ","∼":"∼","≃":"≃","≅":"≅","≈":"≈","𝕋":"𝕋","⃛":"⃛","𝒯":"𝒯","Ŧ":"Ŧ","Ú":"Ú","↟":"↟","⥉":"⥉","Ў":"Ў","Ŭ":"Ŭ","Û":"Û","У":"У","Ű":"Ű","𝔘":"𝔘","Ù":"Ù","Ū":"Ū",_:"_","⏟":"⏟","⎵":"⎵","⏝":"⏝","⋃":"⋃","⊎":"⊎","Ų":"Ų","𝕌":"𝕌","⤒":"⤒","⇅":"⇅","↕":"↕","⥮":"⥮","⊥":"⊥","↥":"↥","↖":"↖","↗":"↗","ϒ":"ϒ","Υ":"Υ","Ů":"Ů","𝒰":"𝒰","Ũ":"Ũ","Ü":"Ü","⊫":"⊫","⫫":"⫫","В":"В","⊩":"⊩","⫦":"⫦","⋁":"⋁","‖":"‖","∣":"∣","|":"|","❘":"❘","≀":"≀"," ":" 
","𝔙":"𝔙","𝕍":"𝕍","𝒱":"𝒱","⊪":"⊪","Ŵ":"Ŵ","⋀":"⋀","𝔚":"𝔚","𝕎":"𝕎","𝒲":"𝒲","𝔛":"𝔛","Ξ":"Ξ","𝕏":"𝕏","𝒳":"𝒳","Я":"Я","Ї":"Ї","Ю":"Ю","Ý":"Ý","Ŷ":"Ŷ","Ы":"Ы","𝔜":"𝔜","𝕐":"𝕐","𝒴":"𝒴","Ÿ":"Ÿ","Ж":"Ж","Ź":"Ź","Ž":"Ž","З":"З","Ż":"Ż","Ζ":"Ζ","ℨ":"ℨ","ℤ":"ℤ","𝒵":"𝒵","á":"á","ă":"ă","∾":"∾","∾̳":"∾̳","∿":"∿","â":"â","а":"а","æ":"æ","𝔞":"𝔞","à":"à","ℵ":"ℵ","α":"α","ā":"ā","⨿":"⨿","∧":"∧","⩕":"⩕","⩜":"⩜","⩘":"⩘","⩚":"⩚","∠":"∠","⦤":"⦤","∡":"∡","⦨":"⦨","⦩":"⦩","⦪":"⦪","⦫":"⦫","⦬":"⦬","⦭":"⦭","⦮":"⦮","⦯":"⦯","∟":"∟","⊾":"⊾","⦝":"⦝","∢":"∢","⍼":"⍼","ą":"ą","𝕒":"𝕒","⩰":"⩰","⩯":"⩯","≊":"≊","≋":"≋","'":"'","å":"å","𝒶":"𝒶","*":"*","ã":"ã","ä":"ä","⨑":"⨑","⫭":"⫭","≌":"≌","϶":"϶","‵":"‵","∽":"∽","⋍":"⋍","⊽":"⊽","⌅":"⌅","⎶":"⎶","б":"б","„":"„","⦰":"⦰","β":"β","ℶ":"ℶ","≬":"≬","𝔟":"𝔟","◯":"◯","⨀":"⨀","⨁":"⨁","⨂":"⨂","⨆":"⨆","★":"★","▽":"▽","△":"△","⨄":"⨄","⤍":"⤍","⧫":"⧫","▴":"▴","▾":"▾","◂":"◂","▸":"▸","␣":"␣","▒":"▒","░":"░","▓":"▓","█":"█","=⃥":"=⃥","≡⃥":"≡⃥","⌐":"⌐","𝕓":"𝕓","⋈":"⋈","╗":"╗","╔":"╔","╖":"╖","╓":"╓","═":"═","╦":"╦","╩":"╩","╤":"╤","╧":"╧","╝":"╝","╚":"╚","╜":"╜","╙":"╙","║":"║","╬":"╬","╣":"╣","╠":"╠","╫":"╫","╢":"╢","╟":"╟","⧉":"⧉","╕":"╕","╒":"╒","┐":"┐","┌":"┌","╥":"╥","╨":"╨","┬":"┬","┴":"┴","⊟":"⊟","⊞":"⊞","⊠":"⊠","╛":"╛","╘":"╘","┘":"┘","└":"└","│":"│","╪":"╪","╡":"╡","╞":"╞","┼":"┼","┤":"┤","├":"├","¦":"¦","𝒷":"𝒷","⁏":"⁏","\\":"\","⧅":"⧅","⟈":"⟈","•":"•","⪮":"⪮","ć":"ć","∩":"∩","⩄":"⩄","⩉":"⩉","⩋":"⩋","⩇":"⩇","⩀":"⩀","∩︀":"∩︀","⁁":"⁁","⩍":"⩍","č":"č","ç":"ç","ĉ":"ĉ","⩌":"⩌","⩐":"⩐","ċ":"ċ","⦲":"⦲","¢":"¢","𝔠":"𝔠","ч":"ч","✓":"✓","χ":"χ","○":"○","⧃":"⧃","ˆ":"ˆ","≗":"≗","↺":"↺","↻":"↻","Ⓢ":"Ⓢ","⊛":"⊛","⊚":"⊚","⊝":"⊝","⨐":"⨐","⫯":"⫯","⧂":"⧂","♣":"♣",":":":",",":",","@":"@","∁":"∁","⩭":"⩭","𝕔":"𝕔","℗":"℗","↵":"↵","✗":"✗","𝒸":"𝒸","⫏":"⫏","⫑":"⫑","⫐":"⫐","⫒":"⫒","⋯":"⋯","⤸":"⤸","⤵":"⤵","⋞":"⋞","⋟":"⋟","↶":"↶","⤽":"⤽","∪":"∪","⩈":"⩈","⩆":"⩆","⩊":"⩊","⊍":"⊍","⩅":"⩅","∪︀":"∪︀","↷":"↷","⤼":"⤼","⋎":"⋎","⋏":"⋏","¤":"¤","∱":"∱","⌭":"⌭","⥥":"⥥","†":"†","ℸ":"ℸ","‐":"‐","⤏":"⤏","ď":"ď","д":"д","⇊":"⇊","⩷":"⩷","°":"°","δ":"δ","⦱":"⦱","⥿":"⥿","𝔡":"𝔡","♦":"♦","ϝ":"ϝ","⋲":"⋲","÷":"÷","⋇":"⋇","ђ":"ђ","⌞":"⌞","⌍":"⌍",$:"$","𝕕":"𝕕","≑":"≑","∸":"∸","∔":"∔","⊡":"⊡","⌟":"⌟","⌌":"⌌","𝒹":"𝒹","ѕ":"ѕ","⧶":"⧶","đ":"đ","⋱":"⋱","▿":"▿","⦦":"⦦","џ":"џ","⟿":"⟿","é":"é","⩮":"⩮","ě":"ě","≖":"≖","ê":"ê","≕":"≕","э":"э","ė":"ė","≒":"≒","𝔢":"𝔢","⪚":"⪚","è":"è","⪖":"⪖","⪘":"⪘","⪙":"⪙","⏧":"⏧","ℓ":"ℓ","⪕":"⪕","⪗":"⪗","ē":"ē","∅":"∅"," ":" "," ":" "," ":" ","ŋ":"ŋ"," ":" 
","ę":"ę","𝕖":"𝕖","⋕":"⋕","⧣":"⧣","⩱":"⩱","ε":"ε","ϵ":"ϵ","=":"=","≟":"≟","⩸":"⩸","⧥":"⧥","≓":"≓","⥱":"⥱","ℯ":"ℯ","η":"η","ð":"ð","ë":"ë","€":"€","!":"!","ф":"ф","♀":"♀","ffi":"ffi","ff":"ff","ffl":"ffl","𝔣":"𝔣","fi":"fi",fj:"fj","♭":"♭","fl":"fl","▱":"▱","ƒ":"ƒ","𝕗":"𝕗","⋔":"⋔","⫙":"⫙","⨍":"⨍","½":"½","⅓":"⅓","¼":"¼","⅕":"⅕","⅙":"⅙","⅛":"⅛","⅔":"⅔","⅖":"⅖","¾":"¾","⅗":"⅗","⅜":"⅜","⅘":"⅘","⅚":"⅚","⅝":"⅝","⅞":"⅞","⁄":"⁄","⌢":"⌢","𝒻":"𝒻","⪌":"⪌","ǵ":"ǵ","γ":"γ","⪆":"⪆","ğ":"ğ","ĝ":"ĝ","г":"г","ġ":"ġ","⪩":"⪩","⪀":"⪀","⪂":"⪂","⪄":"⪄","⋛︀":"⋛︀","⪔":"⪔","𝔤":"𝔤","ℷ":"ℷ","ѓ":"ѓ","⪒":"⪒","⪥":"⪥","⪤":"⪤","≩":"≩","⪊":"⪊","⪈":"⪈","⋧":"⋧","𝕘":"𝕘","ℊ":"ℊ","⪎":"⪎","⪐":"⪐","⪧":"⪧","⩺":"⩺","⋗":"⋗","⦕":"⦕","⩼":"⩼","⥸":"⥸","≩︀":"≩︀","ъ":"ъ","⥈":"⥈","↭":"↭","ℏ":"ℏ","ĥ":"ĥ","♥":"♥","…":"…","⊹":"⊹","𝔥":"𝔥","⤥":"⤥","⤦":"⤦","⇿":"⇿","∻":"∻","↩":"↩","↪":"↪","𝕙":"𝕙","―":"―","𝒽":"𝒽","ħ":"ħ","⁃":"⁃","í":"í","î":"î","и":"и","е":"е","¡":"¡","𝔦":"𝔦","ì":"ì","⨌":"⨌","∭":"∭","⧜":"⧜","℩":"℩","ij":"ij","ī":"ī","ı":"ı","⊷":"⊷","Ƶ":"Ƶ","℅":"℅","∞":"∞","⧝":"⧝","⊺":"⊺","⨗":"⨗","⨼":"⨼","ё":"ё","į":"į","𝕚":"𝕚","ι":"ι","¿":"¿","𝒾":"𝒾","⋹":"⋹","⋵":"⋵","⋴":"⋴","⋳":"⋳","ĩ":"ĩ","і":"і","ï":"ï","ĵ":"ĵ","й":"й","𝔧":"𝔧","ȷ":"ȷ","𝕛":"𝕛","𝒿":"𝒿","ј":"ј","є":"є","κ":"κ","ϰ":"ϰ","ķ":"ķ","к":"к","𝔨":"𝔨","ĸ":"ĸ","х":"х","ќ":"ќ","𝕜":"𝕜","𝓀":"𝓀","⤛":"⤛","⤎":"⤎","⪋":"⪋","⥢":"⥢","ĺ":"ĺ","⦴":"⦴","λ":"λ","⦑":"⦑","⪅":"⪅","«":"«","⤟":"⤟","⤝":"⤝","↫":"↫","⤹":"⤹","⥳":"⥳","↢":"↢","⪫":"⪫","⤙":"⤙","⪭":"⪭","⪭︀":"⪭︀","⤌":"⤌","❲":"❲","{":"{","[":"[","⦋":"⦋","⦏":"⦏","⦍":"⦍","ľ":"ľ","ļ":"ļ","л":"л","⤶":"⤶","⥧":"⥧","⥋":"⥋","↲":"↲","≤":"≤","⇇":"⇇","⋋":"⋋","⪨":"⪨","⩿":"⩿","⪁":"⪁","⪃":"⪃","⋚︀":"⋚︀","⪓":"⪓","⋖":"⋖","⥼":"⥼","𝔩":"𝔩","⪑":"⪑","⥪":"⥪","▄":"▄","љ":"љ","⥫":"⥫","◺":"◺","ŀ":"ŀ","⎰":"⎰","≨":"≨","⪉":"⪉","⪇":"⪇","⋦":"⋦","⟬":"⟬","⇽":"⇽","⟼":"⟼","↬":"↬","⦅":"⦅","𝕝":"𝕝","⨭":"⨭","⨴":"⨴","∗":"∗","◊":"◊","(":"(","⦓":"⦓","⥭":"⥭","‎":"‎","⊿":"⊿","‹":"‹","𝓁":"𝓁","⪍":"⪍","⪏":"⪏","‚":"‚","ł":"ł","⪦":"⪦","⩹":"⩹","⋉":"⋉","⥶":"⥶","⩻":"⩻","⦖":"⦖","◃":"◃","⥊":"⥊","⥦":"⥦","≨︀":"≨︀","∺":"∺","¯":"¯","♂":"♂","✠":"✠","▮":"▮","⨩":"⨩","м":"м","—":"—","𝔪":"𝔪","℧":"℧","µ":"µ","⫰":"⫰","−":"−","⨪":"⨪","⫛":"⫛","⊧":"⊧","𝕞":"𝕞","𝓂":"𝓂","μ":"μ","⊸":"⊸","⋙̸":"⋙̸","≫⃒":"≫⃒","⇍":"⇍","⇎":"⇎","⋘̸":"⋘̸","≪⃒":"≪⃒","⇏":"⇏","⊯":"⊯","⊮":"⊮","ń":"ń","∠⃒":"∠⃒","⩰̸":"⩰̸","≋̸":"≋̸","ʼn":"ʼn","♮":"♮","⩃":"⩃","ň":"ň","ņ":"ņ","⩭̸":"⩭̸","⩂":"⩂","н":"н","–":"–","⇗":"⇗","⤤":"⤤","≐̸":"≐̸","⤨":"⤨","𝔫":"𝔫","↮":"↮","⫲":"⫲","⋼":"⋼","⋺":"⋺","њ":"њ","≦̸":"≦̸","↚":"↚","‥":"‥","𝕟":"𝕟","¬":"¬","⋹̸":"⋹̸","⋵̸":"⋵̸","⋷":"⋷","⋶":"⋶","⋾":"⋾","⋽":"⋽","⫽⃥":"⫽⃥","∂̸":"∂̸","⨔":"⨔","↛":"↛","⤳̸":"⤳̸","↝̸":"↝̸","𝓃":"𝓃","⊄":"⊄","⫅̸":"⫅̸","⊅":"⊅","⫆̸":"⫆̸","ñ":"ñ","ν":"ν","#":"#","№":"№"," ":" ","⊭":"⊭","⤄":"⤄","≍⃒":"≍⃒","⊬":"⊬","≥⃒":"≥⃒",">⃒":">⃒","⧞":"⧞","⤂":"⤂","≤⃒":"≤⃒","<⃒":"<⃒","⊴⃒":"⊴⃒","⤃":"⤃","⊵⃒":"⊵⃒","∼⃒":"∼⃒","⇖":"⇖","⤣":"⤣","⤧":"⤧","ó":"ó","ô":"ô","о":"о","ő":"ő","⨸":"⨸","⦼":"⦼","œ":"œ","⦿":"⦿","𝔬":"𝔬","˛":"˛","ò":"ò","⧁":"⧁","⦵":"⦵","⦾":"⦾","⦻":"⦻","⧀":"⧀","ō":"ō","ω":"ω","ο":"ο","⦶":"⦶","𝕠":"𝕠","⦷":"⦷","⦹":"⦹","∨":"∨","⩝":"⩝","ℴ":"ℴ","ª":"ª","º":"º","⊶":"⊶","⩖":"⩖","⩗":"⩗","⩛":"⩛","ø":"ø","⊘":"⊘","õ":"õ","⨶":"⨶","ö":"ö","⌽":"⌽","¶":"¶","⫳":"⫳","⫽":"⫽","п":"п","%":"%",".":".","‰":"‰","‱":"‱","𝔭":"𝔭","φ":"φ","ϕ":"ϕ","☎":"☎","π":"π","ϖ":"ϖ","ℎ":"ℎ","+":"+","⨣":"⨣","⨢":"⨢","⨥":"⨥","⩲":"⩲","⨦":"⨦","⨧":"⨧","⨕":"⨕","𝕡":"𝕡","£":"£","⪳":"⪳","⪷":"⪷","⪹":"⪹","⪵":"⪵","⋨":"⋨","′":"′","⌮":"⌮","⌒":"⌒","⌓":"⌓","⊰":"⊰","𝓅":"𝓅","ψ":"ψ"," ":" 
","𝔮":"𝔮","𝕢":"𝕢","⁗":"⁗","𝓆":"𝓆","⨖":"⨖","?":"?","⤜":"⤜","⥤":"⥤","∽̱":"∽̱","ŕ":"ŕ","⦳":"⦳","⦒":"⦒","⦥":"⦥","»":"»","⥵":"⥵","⤠":"⤠","⤳":"⤳","⤞":"⤞","⥅":"⥅","⥴":"⥴","↣":"↣","↝":"↝","⤚":"⤚","∶":"∶","❳":"❳","}":"}","]":"]","⦌":"⦌","⦎":"⦎","⦐":"⦐","ř":"ř","ŗ":"ŗ","р":"р","⤷":"⤷","⥩":"⥩","↳":"↳","▭":"▭","⥽":"⥽","𝔯":"𝔯","⥬":"⥬","ρ":"ρ","ϱ":"ϱ","⇉":"⇉","⋌":"⋌","˚":"˚","‏":"‏","⎱":"⎱","⫮":"⫮","⟭":"⟭","⇾":"⇾","⦆":"⦆","𝕣":"𝕣","⨮":"⨮","⨵":"⨵",")":")","⦔":"⦔","⨒":"⨒","›":"›","𝓇":"𝓇","⋊":"⋊","▹":"▹","⧎":"⧎","⥨":"⥨","℞":"℞","ś":"ś","⪴":"⪴","⪸":"⪸","š":"š","ş":"ş","ŝ":"ŝ","⪶":"⪶","⪺":"⪺","⋩":"⋩","⨓":"⨓","с":"с","⋅":"⋅","⩦":"⩦","⇘":"⇘","§":"§",";":";","⤩":"⤩","✶":"✶","𝔰":"𝔰","♯":"♯","щ":"щ","ш":"ш","­":"­","σ":"σ","ς":"ς","⩪":"⩪","⪞":"⪞","⪠":"⪠","⪝":"⪝","⪟":"⪟","≆":"≆","⨤":"⨤","⥲":"⥲","⨳":"⨳","⧤":"⧤","⌣":"⌣","⪪":"⪪","⪬":"⪬","⪬︀":"⪬︀","ь":"ь","/":"/","⧄":"⧄","⌿":"⌿","𝕤":"𝕤","♠":"♠","⊓︀":"⊓︀","⊔︀":"⊔︀","𝓈":"𝓈","☆":"☆","⊂":"⊂","⫅":"⫅","⪽":"⪽","⫃":"⫃","⫁":"⫁","⫋":"⫋","⊊":"⊊","⪿":"⪿","⥹":"⥹","⫇":"⫇","⫕":"⫕","⫓":"⫓","♪":"♪","¹":"¹","²":"²","³":"³","⫆":"⫆","⪾":"⪾","⫘":"⫘","⫄":"⫄","⟉":"⟉","⫗":"⫗","⥻":"⥻","⫂":"⫂","⫌":"⫌","⊋":"⊋","⫀":"⫀","⫈":"⫈","⫔":"⫔","⫖":"⫖","⇙":"⇙","⤪":"⤪","ß":"ß","⌖":"⌖","τ":"τ","ť":"ť","ţ":"ţ","т":"т","⌕":"⌕","𝔱":"𝔱","θ":"θ","ϑ":"ϑ","þ":"þ","×":"×","⨱":"⨱","⨰":"⨰","⌶":"⌶","⫱":"⫱","𝕥":"𝕥","⫚":"⫚","‴":"‴","▵":"▵","≜":"≜","◬":"◬","⨺":"⨺","⨹":"⨹","⧍":"⧍","⨻":"⨻","⏢":"⏢","𝓉":"𝓉","ц":"ц","ћ":"ћ","ŧ":"ŧ","⥣":"⥣","ú":"ú","ў":"ў","ŭ":"ŭ","û":"û","у":"у","ű":"ű","⥾":"⥾","𝔲":"𝔲","ù":"ù","▀":"▀","⌜":"⌜","⌏":"⌏","◸":"◸","ū":"ū","ų":"ų","𝕦":"𝕦","υ":"υ","⇈":"⇈","⌝":"⌝","⌎":"⌎","ů":"ů","◹":"◹","𝓊":"𝓊","⋰":"⋰","ũ":"ũ","ü":"ü","⦧":"⦧","⫨":"⫨","⫩":"⫩","⦜":"⦜","⊊︀":"⊊︀","⫋︀":"⫋︀","⊋︀":"⊋︀","⫌︀":"⫌︀","в":"в","⊻":"⊻","≚":"≚","⋮":"⋮","𝔳":"𝔳","𝕧":"𝕧","𝓋":"𝓋","⦚":"⦚","ŵ":"ŵ","⩟":"⩟","≙":"≙","℘":"℘","𝔴":"𝔴","𝕨":"𝕨","𝓌":"𝓌","𝔵":"𝔵","ξ":"ξ","⋻":"⋻","𝕩":"𝕩","𝓍":"𝓍","ý":"ý","я":"я","ŷ":"ŷ","ы":"ы","¥":"¥","𝔶":"𝔶","ї":"ї","𝕪":"𝕪","𝓎":"𝓎","ю":"ю","ÿ":"ÿ","ź":"ź","ž":"ž","з":"з","ż":"ż","ζ":"ζ","𝔷":"𝔷","ж":"ж","⇝":"⇝","𝕫":"𝕫","𝓏":"𝓏","‍":"‍","‌":"‌"}}}; +//# sourceMappingURL=./named-references.js.map /***/ }), -/***/ 91311: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 45439: +/***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getAttrPathList = void 0; -const types_1 = __nccwpck_require__(75442); -const getAttrPathList = (path) => { - const parts = path.split("."); - const pathList = []; - for (const part of parts) { - const squareBracketIndex = part.indexOf("["); - if (squareBracketIndex !== -1) { - if (part.indexOf("]") !== part.length - 1) { - throw new types_1.EndpointError(`Path: '${path}' does not end with ']'`); - } - const arrayIndex = part.slice(squareBracketIndex + 1, -1); - if (Number.isNaN(parseInt(arrayIndex))) { - throw new types_1.EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); - } - if (squareBracketIndex !== 0) { - pathList.push(part.slice(0, squareBracketIndex)); - } - pathList.push(arrayIndex); - } - else { - pathList.push(part); - } - } - return pathList; -}; -exports.getAttrPathList = getAttrPathList; - +Object.defineProperty(exports, "__esModule", ({value:true}));exports.numericUnicodeMap={0:65533,128:8364,130:8218,131:402,132:8222,133:8230,134:8224,135:8225,136:710,137:8240,138:352,139:8249,140:338,142:381,145:8216,146:8217,147:8220,148:8221,149:8226,150:8211,151:8212,152:732,153:8482,154:353,155:8250,156:339,158:382,159:376}; +//# 
sourceMappingURL=./numeric-unicode-map.js.map /***/ }), -/***/ 36559: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 1454: +/***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(29132), exports); -tslib_1.__exportStar(__nccwpck_require__(84624), exports); -tslib_1.__exportStar(__nccwpck_require__(71231), exports); -tslib_1.__exportStar(__nccwpck_require__(55021), exports); -tslib_1.__exportStar(__nccwpck_require__(42249), exports); -tslib_1.__exportStar(__nccwpck_require__(84654), exports); -tslib_1.__exportStar(__nccwpck_require__(72512), exports); -tslib_1.__exportStar(__nccwpck_require__(49245), exports); -tslib_1.__exportStar(__nccwpck_require__(51482), exports); - +Object.defineProperty(exports, "__esModule", ({value:true}));exports.fromCodePoint=String.fromCodePoint||function(astralCodePoint){return String.fromCharCode(Math.floor((astralCodePoint-65536)/1024)+55296,(astralCodePoint-65536)%1024+56320)};exports.getCodePoint=String.prototype.codePointAt?function(input,position){return input.codePointAt(position)}:function(input,position){return(input.charCodeAt(position)-55296)*1024+input.charCodeAt(position+1)-56320+65536};exports.highSurrogateFrom=55296;exports.highSurrogateTo=56319; +//# sourceMappingURL=./surrogate-pairs.js.map /***/ }), -/***/ 55402: -/***/ ((__unused_webpack_module, exports) => { +/***/ 77492: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isIpAddress = void 0; -const IP_V4_REGEX = new RegExp(`^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$`); -const isIpAddress = (value) => IP_V4_REGEX.test(value) || (value.startsWith("[") && value.endsWith("]")); -exports.isIpAddress = isIpAddress; - +const net_1 = __importDefault(__nccwpck_require__(41808)); +const tls_1 = __importDefault(__nccwpck_require__(24404)); +const url_1 = __importDefault(__nccwpck_require__(57310)); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const once_1 = __importDefault(__nccwpck_require__(81040)); +const agent_base_1 = __nccwpck_require__(49690); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. 
+ * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. 
+ if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; + }); + } +} +exports["default"] = HttpProxyAgent; +//# sourceMappingURL=agent.js.map /***/ }), -/***/ 71231: -/***/ ((__unused_webpack_module, exports) => { +/***/ 23764: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isSet = void 0; -const isSet = (value) => value != null; -exports.isSet = isSet; - +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(77492)); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 55021: -/***/ ((__unused_webpack_module, exports) => { +/***/ 15098: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isValidHostLabel = void 0; -const VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); -const isValidHostLabel = (value, allowSubDomains = false) => { - if (!allowSubDomains) { - return VALID_HOST_LABEL_REGEX.test(value); +const net_1 = __importDefault(__nccwpck_require__(41808)); +const tls_1 = __importDefault(__nccwpck_require__(24404)); +const url_1 = __importDefault(__nccwpck_require__(57310)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const agent_base_1 = __nccwpck_require__(49690); +const parse_proxy_response_1 = __importDefault(__nccwpck_require__(595)); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; } - const labels = value.split("."); - for (const label of labels) { - if (!(0, exports.isValidHostLabel)(label)) { - return false; + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. 
+ let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. + let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + }); + } +} +exports["default"] = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; } } - return true; -}; -exports.isValidHostLabel = isValidHostLabel; - + return ret; +} +//# sourceMappingURL=agent.js.map /***/ }), -/***/ 42249: -/***/ ((__unused_webpack_module, exports) => { +/***/ 77219: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.not = void 0; -const not = (value) => !value; -exports.not = not; - +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(15098)); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 84654: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 595: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseURL = void 0; -const types_1 = __nccwpck_require__(55756); -const isIpAddress_1 = __nccwpck_require__(55402); -const DEFAULT_PORTS = { - [types_1.EndpointURLScheme.HTTP]: 80, - [types_1.EndpointURLScheme.HTTPS]: 443, +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; }; -const parseURL = (value) => { - const whatwgURL = (() => { - try { - if (value instanceof URL) { - return value; - } - if (typeof value === "object" && "hostname" in value) { - const { hostname, port, protocol = "", path = "", query = {} } = value; - const url = new URL(`${protocol}//${hostname}${port ? `:${port}` : ""}${path}`); - url.search = Object.entries(query) - .map(([k, v]) => `${k}=${v}`) - .join("&"); - return url; - } - return new URL(value); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); } - catch (error) { - return null; + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); } - })(); - if (!whatwgURL) { - console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); - return null; - } - const urlString = whatwgURL.href; - const { host, hostname, pathname, protocol, search } = whatwgURL; - if (search) { - return null; - } - const scheme = protocol.slice(0, -1); - if (!Object.values(types_1.EndpointURLScheme).includes(scheme)) { - return null; - } - const isIp = (0, isIpAddress_1.isIpAddress)(hostname); - const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || - (typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`)); - const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; - return { - scheme, - authority, - path: pathname, - normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, - isIp, - }; -}; -exports.parseURL = parseURL; - + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports["default"] = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map /***/ }), -/***/ 72512: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; +/***/ 44124: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.stringEquals = void 0; -const stringEquals = (value1, value2) => value1 === value2; -exports.stringEquals = stringEquals; +try { + var util = __nccwpck_require__(73837); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(8544); +} /***/ }), -/***/ 49245: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; +/***/ 8544: +/***/ ((module) => { -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.substring = void 0; -const substring = (input, start, stop, reverse) => { - if (start >= stop || input.length < stop) { - return null; +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) } - if (!reverse) { - return 
input.substring(start, stop); + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor } - return input.substring(input.length - stop, input.length - start); -}; -exports.substring = substring; + } +} /***/ }), -/***/ 51482: -/***/ ((__unused_webpack_module, exports) => { +/***/ 98768: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.uriEncode = void 0; -const uriEncode = (value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`); -exports.uriEncode = uriEncode; +const fs = __nccwpck_require__(57147); +let isDocker; -/***/ }), +function hasDockerEnv() { + try { + fs.statSync('/.dockerenv'); + return true; + } catch (_) { + return false; + } +} -/***/ 78693: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function hasDockerCGroup() { + try { + return fs.readFileSync('/proc/self/cgroup', 'utf8').includes('docker'); + } catch (_) { + return false; + } +} -"use strict"; +module.exports = () => { + if (isDocker === undefined) { + isDocker = hasDockerEnv() || hasDockerCGroup(); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveEndpoint = void 0; -const debug_1 = __nccwpck_require__(30540); -const types_1 = __nccwpck_require__(75442); -const utils_1 = __nccwpck_require__(96871); -const resolveEndpoint = (ruleSetObject, options) => { - var _a, _b, _c, _d, _e, _f; - const { endpointParams, logger } = options; - const { parameters, rules } = ruleSetObject; - (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, `${debug_1.debugId} Initial EndpointParams: ${(0, debug_1.toDebugString)(endpointParams)}`); - const paramsWithDefault = Object.entries(parameters) - .filter(([, v]) => v.default != null) - .map(([k, v]) => [k, v.default]); - if (paramsWithDefault.length > 0) { - for (const [paramKey, paramDefaultValue] of paramsWithDefault) { - endpointParams[paramKey] = (_c = endpointParams[paramKey]) !== null && _c !== void 0 ? _c : paramDefaultValue; - } - } - const requiredParams = Object.entries(parameters) - .filter(([, v]) => v.required) - .map(([k]) => k); - for (const requiredParam of requiredParams) { - if (endpointParams[requiredParam] == null) { - throw new types_1.EndpointError(`Missing required parameter: '${requiredParam}'`); - } - } - const endpoint = (0, utils_1.evaluateRules)(rules, { endpointParams, logger, referenceRecord: {} }); - if ((_d = options.endpointParams) === null || _d === void 0 ? void 0 : _d.Endpoint) { - try { - const givenEndpoint = new URL(options.endpointParams.Endpoint); - const { protocol, port } = givenEndpoint; - endpoint.url.protocol = protocol; - endpoint.url.port = port; - } - catch (e) { - } - } - (_f = (_e = options.logger) === null || _e === void 0 ? void 0 : _e.debug) === null || _f === void 0 ? 
void 0 : _f.call(_e, `${debug_1.debugId} Resolved endpoint: ${(0, debug_1.toDebugString)(endpoint)}`); - return endpoint; + return isDocker; }; -exports.resolveEndpoint = resolveEndpoint; /***/ }), -/***/ 84213: -/***/ ((__unused_webpack_module, exports) => { +/***/ 41554: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.EndpointError = void 0; -class EndpointError extends Error { - constructor(message) { - super(message); - this.name = "EndpointError"; - } -} -exports.EndpointError = EndpointError; - - -/***/ }), - -/***/ 34073: -/***/ ((__unused_webpack_module, exports) => { -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + typeof stream._write === 'function' && + typeof stream._writableState === 'object'; -/***/ }), +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; -/***/ 72533: -/***/ ((__unused_webpack_module, exports) => { +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); -"use strict"; +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; -Object.defineProperty(exports, "__esModule", ({ value: true })); +module.exports = isStream; /***/ }), -/***/ 63135: -/***/ ((__unused_webpack_module, exports) => { +/***/ 52559: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); +const os = __nccwpck_require__(22037); +const fs = __nccwpck_require__(57147); +const isDocker = __nccwpck_require__(98768); +const isWsl = () => { + if (process.platform !== 'linux') { + return false; + } -/***/ }), + if (os.release().toLowerCase().includes('microsoft')) { + if (isDocker()) { + return false; + } -/***/ 19136: -/***/ ((__unused_webpack_module, exports) => { + return true; + } -"use strict"; + try { + return fs.readFileSync('/proc/version', 'utf8').toLowerCase().includes('microsoft') ? 
+ !isDocker() : false; + } catch (_) { + return false; + } +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); +if (process.env.__IS_WSL_TEST__) { + module.exports = isWsl; +} else { + module.exports = isWsl(); +} /***/ }), -/***/ 28344: -/***/ ((__unused_webpack_module, exports) => { +/***/ 51778: +/***/ ((module) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); +class JSBI extends Array{constructor(i,_){if(super(i),this.sign=_,i>JSBI.__kMaxLength)throw new RangeError("Maximum BigInt size exceeded")}static BigInt(i){var _=Math.floor,t=Number.isFinite;if("number"==typeof i){if(0===i)return JSBI.__zero();if(JSBI.__isOneDigitInt(i))return 0>i?JSBI.__oneDigit(-i,!0):JSBI.__oneDigit(i,!1);if(!t(i)||_(i)!==i)throw new RangeError("The number "+i+" cannot be converted to BigInt because it is not an integer");return JSBI.__fromDouble(i)}if("string"==typeof i){const _=JSBI.__fromString(i);if(null===_)throw new SyntaxError("Cannot convert "+i+" to a BigInt");return _}if("boolean"==typeof i)return!0===i?JSBI.__oneDigit(1,!1):JSBI.__zero();if("object"==typeof i){if(i.constructor===JSBI)return i;const _=JSBI.__toPrimitive(i);return JSBI.BigInt(_)}throw new TypeError("Cannot convert "+i+" to a BigInt")}toDebugString(){const i=["BigInt["];for(const _ of this)i.push((_?(_>>>0).toString(16):_)+", ");return i.push("]"),i.join("")}toString(i=10){if(2>i||36>>=12;const a=l-12;let u=12<=l?0:o<<20+l,d=20+l;for(0>>30-a,u=o<>>30-d,d-=30;const h=JSBI.__decideRounding(i,d,s,o);if((1===h||0===h&&1==(1&u))&&(u=u+1>>>0,0===u&&(r++,0!=r>>>20&&(r=0,g++,1023=JSBI.__kMaxLengthBits)throw new RangeError("BigInt too big");if(1===i.length&&2===i.__digit(0)){const _=1+(0|t/30),e=i.sign&&0!=(1&t),n=new JSBI(_,e);n.__initializeDigits();const g=1<>=1;0!==t;t>>=1)n=JSBI.multiply(n,n),0!=(1&t)&&(null===e?e=n:e=JSBI.multiply(e,n));return e}static multiply(_,t){if(0===_.length)return _;if(0===t.length)return t;let i=_.length+t.length;30<=_.__clzmsd()+t.__clzmsd()&&i--;const e=new JSBI(i,_.sign!==t.sign);e.__initializeDigits();for(let n=0;n<_.length;n++)JSBI.__multiplyAccumulate(t,_.__digit(n),e,n);return e.__trim()}static divide(i,_){if(0===_.length)throw new RangeError("Division by zero");if(0>JSBI.__absoluteCompare(i,_))return JSBI.__zero();const t=i.sign!==_.sign,e=_.__unsignedDigit(0);let n;if(1===_.length&&32767>=e){if(1===e)return t===i.sign?i:JSBI.unaryMinus(i);n=JSBI.__absoluteDivSmall(i,e,null)}else n=JSBI.__absoluteDivLarge(i,_,!0,!1);return n.sign=t,n.__trim()}static remainder(i,_){if(0===_.length)throw new RangeError("Division by zero");if(0>JSBI.__absoluteCompare(i,_))return i;const t=_.__unsignedDigit(0);if(1===_.length&&32767>=t){if(1===t)return JSBI.__zero();const _=JSBI.__absoluteModSmall(i,t);return 0===_?JSBI.__zero():JSBI.__oneDigit(_,i.sign)}const e=JSBI.__absoluteDivLarge(i,_,!1,!0);return e.sign=i.sign,e.__trim()}static add(i,_){const t=i.sign;return t===_.sign?JSBI.__absoluteAdd(i,_,t):0<=JSBI.__absoluteCompare(i,_)?JSBI.__absoluteSub(i,_,t):JSBI.__absoluteSub(_,i,!t)}static subtract(i,_){const t=i.sign;return t===_.sign?0<=JSBI.__absoluteCompare(i,_)?JSBI.__absoluteSub(i,_,t):JSBI.__absoluteSub(_,i,!t):JSBI.__absoluteAdd(i,_,t)}static leftShift(i,_){return 0===_.length||0===i.length?i:_.sign?JSBI.__rightShiftByAbsolute(i,_):JSBI.__leftShiftByAbsolute(i,_)}static signedRightShift(i,_){return 0===_.length||0===i.length?i:_.sign?JSBI.__leftShiftByAbsolute(i,_):JSBI.__rightShiftByAbsolute(i,_)}static unsignedRightShift(){throw new TypeError("BigInts have no 
unsigned right shift; use >> instead")}static lessThan(i,_){return 0>JSBI.__compareToBigInt(i,_)}static lessThanOrEqual(i,_){return 0>=JSBI.__compareToBigInt(i,_)}static greaterThan(i,_){return 0_)throw new RangeError("Invalid value: not (convertible to) a safe integer");if(0===_)return JSBI.__zero();if(_>=JSBI.__kMaxLengthBits)return t;const e=0|(_+29)/30;if(t.lengthi)throw new RangeError("Invalid value: not (convertible to) a safe integer");if(0===i)return JSBI.__zero();if(_.sign){if(i>JSBI.__kMaxLengthBits)throw new RangeError("BigInt too big");return JSBI.__truncateAndSubFromPowerOfTwo(i,_,!1)}if(i>=JSBI.__kMaxLengthBits)return _;const e=0|(i+29)/30;if(_.length>>g)return _}return JSBI.__truncateToNBits(i,_)}static ADD(i,_){if(i=JSBI.__toPrimitive(i),_=JSBI.__toPrimitive(_),"string"==typeof i)return"string"!=typeof _&&(_=_.toString()),i+_;if("string"==typeof _)return i.toString()+_;if(i=JSBI.__toNumeric(i),_=JSBI.__toNumeric(_),JSBI.__isBigInt(i)&&JSBI.__isBigInt(_))return JSBI.add(i,_);if("number"==typeof i&&"number"==typeof _)return i+_;throw new TypeError("Cannot mix BigInt and other types, use explicit conversions")}static LT(i,_){return JSBI.__compare(i,_,0)}static LE(i,_){return JSBI.__compare(i,_,1)}static GT(i,_){return JSBI.__compare(i,_,2)}static GE(i,_){return JSBI.__compare(i,_,3)}static EQ(i,_){for(;;){if(JSBI.__isBigInt(i))return JSBI.__isBigInt(_)?JSBI.equal(i,_):JSBI.EQ(_,i);if("number"==typeof i){if(JSBI.__isBigInt(_))return JSBI.__equalToNumber(_,i);if("object"!=typeof _)return i==_;_=JSBI.__toPrimitive(_)}else if("string"==typeof i){if(JSBI.__isBigInt(_))return i=JSBI.__fromString(i),null!==i&&JSBI.equal(i,_);if("object"!=typeof _)return i==_;_=JSBI.__toPrimitive(_)}else if("boolean"==typeof i){if(JSBI.__isBigInt(_))return JSBI.__equalToNumber(_,+i);if("object"!=typeof _)return i==_;_=JSBI.__toPrimitive(_)}else if("symbol"==typeof i){if(JSBI.__isBigInt(_))return!1;if("object"!=typeof _)return i==_;_=JSBI.__toPrimitive(_)}else if("object"==typeof i){if("object"==typeof _&&_.constructor!==JSBI)return i==_;i=JSBI.__toPrimitive(i)}else return i==_}}static NE(i,_){return!JSBI.EQ(i,_)}static __zero(){return new JSBI(0,!1)}static __oneDigit(i,_){const t=new JSBI(1,_);return t.__setDigit(0,i),t}__copy(){const _=new JSBI(this.length,this.sign);for(let t=0;t_)n=-_-1;else{if(0===t)return-1;t--,e=i.__digit(t),n=29}let g=1<>>20,t=_-1023,e=(0|t/30)+1,n=new JSBI(e,0>i);let g=1048575&JSBI.__kBitConversionInts[1]|1048576,o=JSBI.__kBitConversionInts[0];const s=20,l=t%30;let r,a=0;if(l<20){const i=s-l;a=i+32,r=g>>>i,g=g<<32-i|o>>>i,o<<=32-i}else if(l===20)a=32,r=g,g=o,o=0;else{const i=l-s;a=32-i,r=g<>>32-i,g=o<>>2,g=g<<30|o>>>2,o<<=30):r=0,n.__setDigit(_,r);return n.__trim()}static __isWhitespace(i){return!!(13>=i&&9<=i)||(159>=i?32==i:131071>=i?160==i||5760==i:196607>=i?(i&=131071,10>=i||40==i||41==i||47==i||95==i||4096==i):65279==i)}static __fromString(i,_=0){let t=0;const e=i.length;let n=0;if(n===e)return JSBI.__zero();let g=i.charCodeAt(n);for(;JSBI.__isWhitespace(g);){if(++n===e)return JSBI.__zero();g=i.charCodeAt(n)}if(43===g){if(++n===e)return null;g=i.charCodeAt(n),t=1}else if(45===g){if(++n===e)return null;g=i.charCodeAt(n),t=-1}if(0===_){if(_=10,48===g){if(++n===e)return JSBI.__zero();if(g=i.charCodeAt(n),88===g||120===g){if(_=16,++n===e)return null;g=i.charCodeAt(n)}else if(79===g||111===g){if(_=8,++n===e)return null;g=i.charCodeAt(n)}else if(66===g||98===g){if(_=2,++n===e)return null;g=i.charCodeAt(n)}}}else if(16===_&&48===g){if(++n===e)return 
JSBI.__zero();if(g=i.charCodeAt(n),88===g||120===g){if(++n===e)return null;g=i.charCodeAt(n)}}if(0!=t&&10!==_)return null;for(;48===g;){if(++n===e)return JSBI.__zero();g=i.charCodeAt(n)}const o=e-n;let s=JSBI.__kMaxBitsPerChar[_],l=JSBI.__kBitsPerCharTableMultiplier-1;if(o>1073741824/s)return null;const r=s*o+l>>>JSBI.__kBitsPerCharTableShift,a=new JSBI(0|(r+29)/30,!1),u=10>_?_:10,h=10<_?_-10:0;if(0==(_&_-1)){s>>=JSBI.__kBitsPerCharTableShift;const _=[],t=[];let o=!1;do{let l=0,r=0;for(;;){let _;if(g-48>>>0>>0>>0>>0>>JSBI.__kBitsPerCharTableShift)/30;a.__inplaceMultiplyAdd(b,r,D)}while(!t)}if(n!==e){if(!JSBI.__isWhitespace(g))return null;for(n++;n>>l-o)}if(0!==g){if(n>=_.length)throw new Error("implementation bug");_.__setDigit(n++,g)}for(;n<_.length;n++)_.__setDigit(n,0)}static __toStringBasePowerOfTwo(_,i){const t=_.length;let e=i-1;e=(85&e>>>1)+(85&e),e=(51&e>>>2)+(51&e),e=(15&e>>>4)+(15&e);const n=e,g=i-1,o=_.__digit(t-1),s=JSBI.__clz30(o);let l=0|(30*t-s+n-1)/n;if(_.sign&&l++,268435456>>o,d=30-o;d>=n;)r[a--]=JSBI.__kConversionChars[u&g],u>>>=n,d-=n}const h=(u|o<>>n-d;0!==u;)r[a--]=JSBI.__kConversionChars[u&g],u>>>=n;if(_.sign&&(r[a--]="-"),-1!=a)throw new Error("implementation bug");return r.join("")}static __toStringGeneric(_,i,t){const e=_.length;if(0===e)return"";if(1===e){let e=_.__unsignedDigit(0).toString(i);return!1===t&&_.sign&&(e="-"+e),e}const n=30*e-JSBI.__clz30(_.__digit(e-1)),g=JSBI.__kMaxBitsPerChar[i],o=g-1;let s=n*JSBI.__kBitsPerCharTableMultiplier;s+=o-1,s=0|s/o;const l=s+1>>1,r=JSBI.exponentiate(JSBI.__oneDigit(i,!1),JSBI.__oneDigit(l,!1));let a,u;const d=r.__unsignedDigit(0);if(1===r.length&&32767>=d){a=new JSBI(_.length,!1),a.__initializeDigits();let t=0;for(let e=2*_.length-1;0<=e;e--){const i=t<<15|_.__halfDigit(e);a.__setHalfDigit(e,0|i/d),t=0|i%d}u=t.toString(i)}else{const t=JSBI.__absoluteDivLarge(_,r,!0,!0);a=t.quotient;const e=t.remainder.__trim();u=JSBI.__toStringGeneric(e,i,!0)}a.__trim();let h=JSBI.__toStringGeneric(a,i,!0);for(;u.lengthe?JSBI.__absoluteLess(t):0}static __compareToNumber(i,_){if(JSBI.__isOneDigitInt(_)){const t=i.sign,e=0>_;if(t!==e)return JSBI.__unequalSign(t);if(0===i.length){if(e)throw new Error("implementation bug");return 0===_?0:-1}if(1n?JSBI.__absoluteGreater(t):g_)return JSBI.__unequalSign(t);if(0===_)throw new Error("implementation bug: should be handled elsewhere");if(0===i.length)return-1;JSBI.__kBitConversionDouble[0]=_;const e=2047&JSBI.__kBitConversionInts[1]>>>20;if(2047==e)throw new Error("implementation bug: handled elsewhere");const n=e-1023;if(0>n)return JSBI.__absoluteGreater(t);const g=i.length;let o=i.__digit(g-1);const s=JSBI.__clz30(o),l=30*g-s,r=n+1;if(lr)return JSBI.__absoluteGreater(t);let a=1048576|1048575&JSBI.__kBitConversionInts[1],u=JSBI.__kBitConversionInts[0];const d=20,h=29-s;if(h!==(0|(l-1)%30))throw new Error("implementation bug");let m,b=0;if(20>h){const i=d-h;b=i+32,m=a>>>i,a=a<<32-i|u>>>i,u<<=32-i}else if(20===h)b=32,m=a,a=u,u=0;else{const i=h-d;b=32-i,m=a<>>32-i,a=u<>>=0,m>>>=0,o>m)return JSBI.__absoluteGreater(t);if(o>>2,a=a<<30|u>>>2,u<<=30):m=0;const _=i.__unsignedDigit(e);if(_>m)return JSBI.__absoluteGreater(t);if(__&&i.__unsignedDigit(0)===t(_):0===JSBI.__compareToDouble(i,_)}static __comparisonResultToBool(i,_){return 0===_?0>i:1===_?0>=i:2===_?0_;case 3:return i>=_;}if(JSBI.__isBigInt(i)&&"string"==typeof _)return _=JSBI.__fromString(_),null!==_&&JSBI.__comparisonResultToBool(JSBI.__compareToBigInt(i,_),t);if("string"==typeof i&&JSBI.__isBigInt(_))return 
i=JSBI.__fromString(i),null!==i&&JSBI.__comparisonResultToBool(JSBI.__compareToBigInt(i,_),t);if(i=JSBI.__toNumeric(i),_=JSBI.__toNumeric(_),JSBI.__isBigInt(i)){if(JSBI.__isBigInt(_))return JSBI.__comparisonResultToBool(JSBI.__compareToBigInt(i,_),t);if("number"!=typeof _)throw new Error("implementation bug");return JSBI.__comparisonResultToBool(JSBI.__compareToNumber(i,_),t)}if("number"!=typeof i)throw new Error("implementation bug");if(JSBI.__isBigInt(_))return JSBI.__comparisonResultToBool(JSBI.__compareToNumber(_,i),2^t);if("number"!=typeof _)throw new Error("implementation bug");return 0===t?i<_:1===t?i<=_:2===t?i>_:3===t?i>=_:void 0}__clzmsd(){return JSBI.__clz30(this.__digit(this.length-1))}static __absoluteAdd(_,t,e){if(_.length>>30,g.__setDigit(s,1073741823&i)}for(;s<_.length;s++){const i=_.__digit(s)+o;o=i>>>30,g.__setDigit(s,1073741823&i)}return s>>30,n.__setDigit(o,1073741823&i)}for(;o<_.length;o++){const i=_.__digit(o)-g;g=1&i>>>30,n.__setDigit(o,1073741823&i)}return n.__trim()}static __absoluteAddOne(_,i,t=null){const e=_.length;null===t?t=new JSBI(e,i):t.sign=i;let n=1;for(let g=0;g>>30,t.__setDigit(g,1073741823&i)}return 0!=n&&t.__setDigitGrow(e,1),t}static __absoluteSubOne(_,t){const e=_.length;t=t||e;const n=new JSBI(t,!1);let g=1;for(let o=0;o>>30,n.__setDigit(o,1073741823&i)}if(0!=g)throw new Error("implementation bug");for(let g=e;gn?0:_.__unsignedDigit(n)>t.__unsignedDigit(n)?1:-1}static __multiplyAccumulate(_,t,e,n){if(0===t)return;const g=32767&t,o=t>>>15;let s=0,l=0;for(let r,a=0;a<_.length;a++,n++){r=e.__digit(n);const i=_.__digit(a),t=32767&i,u=i>>>15,d=JSBI.__imul(t,g),h=JSBI.__imul(t,o),m=JSBI.__imul(u,g),b=JSBI.__imul(u,o);r+=l+d+s,s=r>>>30,r&=1073741823,r+=((32767&h)<<15)+((32767&m)<<15),s+=r>>>30,l=b+(h>>>15)+(m>>>15),e.__setDigit(n,1073741823&r)}for(;0!=s||0!==l;n++){let i=e.__digit(n);i+=s+l,l=0,s=i>>>30,e.__setDigit(n,1073741823&i)}}static __internalMultiplyAdd(_,t,e,g,o){let s=e,l=0;for(let n=0;n>>15,t),a=e+((32767&g)<<15)+l+s;s=a>>>30,l=g>>>15,o.__setDigit(n,1073741823&a)}if(o.length>g)for(o.__setDigit(g++,s+l);gthis.length&&(t=this.length);const e=32767&i,n=i>>>15;let g=0,o=_;for(let s=0;s>>15,l=JSBI.__imul(_,e),r=JSBI.__imul(_,n),a=JSBI.__imul(t,e),u=JSBI.__imul(t,n);let d=o+l+g;g=d>>>30,d&=1073741823,d+=((32767&r)<<15)+((32767&a)<<15),g+=d>>>30,o=u+(r>>>15)+(a>>>15),this.__setDigit(s,1073741823&d)}if(0!=g||0!==o)throw new Error("implementation bug")}static __absoluteDivSmall(_,t,e=null){null===e&&(e=new JSBI(_.length,!1));let n=0;for(let g,o=2*_.length-1;0<=o;o-=2){g=(n<<15|_.__halfDigit(o))>>>0;const i=0|g/t;n=0|g%t,g=(n<<15|_.__halfDigit(o-1))>>>0;const s=0|g/t;n=0|g%t,e.__setDigit(o>>>1,i<<15|s)}return e}static __absoluteModSmall(_,t){let e=0;for(let n=2*_.length-1;0<=n;n--){const i=(e<<15|_.__halfDigit(n))>>>0;e=0|i%t}return e}static __absoluteDivLarge(i,_,t,e){const g=_.__halfDigitLength(),n=_.length,o=i.__halfDigitLength()-g;let s=null;t&&(s=new JSBI(o+2>>>1,!1),s.__initializeDigits());const l=new JSBI(g+2>>>1,!1);l.__initializeDigits();const r=JSBI.__clz15(_.__halfDigit(g-1));0>>0;r=0|t/u;let e=0|t%u;const n=_.__halfDigit(g-2),o=a.__halfDigit(h+g-2);for(;JSBI.__imul(r,n)>>>0>(e<<16|o)>>>0&&(r--,e+=u,!(32767>>1,d|r))}if(e)return a.__inplaceRightShift(r),t?{quotient:s,remainder:a}:a;if(t)return s;throw new Error("unreachable")}static __clz15(i){return JSBI.__clz30(i)-15}__inplaceAdd(_,t,e){let n=0;for(let g=0;g>>15,this.__setHalfDigit(t+g,32767&i)}return n}__inplaceSub(_,t,e){let n=0;if(1&t){t>>=1;let 
g=this.__digit(t),o=32767&g,s=0;for(;s>>1;s++){const i=_.__digit(s),e=(g>>>15)-(32767&i)-n;n=1&e>>>15,this.__setDigit(t+s,(32767&e)<<15|32767&o),g=this.__digit(t+s+1),o=(32767&g)-(i>>>15)-n,n=1&o>>>15}const i=_.__digit(s),l=(g>>>15)-(32767&i)-n;n=1&l>>>15,this.__setDigit(t+s,(32767&l)<<15|32767&o);if(t+s+1>=this.length)throw new RangeError("out of bounds");0==(1&e)&&(g=this.__digit(t+s+1),o=(32767&g)-(i>>>15)-n,n=1&o>>>15,this.__setDigit(t+_.length,1073709056&g|32767&o))}else{t>>=1;let g=0;for(;g<_.length-1;g++){const i=this.__digit(t+g),e=_.__digit(g),o=(32767&i)-(32767&e)-n;n=1&o>>>15;const s=(i>>>15)-(e>>>15)-n;n=1&s>>>15,this.__setDigit(t+g,(32767&s)<<15|32767&o)}const i=this.__digit(t+g),o=_.__digit(g),s=(32767&i)-(32767&o)-n;n=1&s>>>15;let l=0;0==(1&e)&&(l=(i>>>15)-(o>>>15)-n,n=1&l>>>15),this.__setDigit(t+g,(32767&l)<<15|32767&s)}return n}__inplaceRightShift(_){if(0===_)return;let t=this.__digit(0)>>>_;const e=this.length-1;for(let n=0;n>>_}this.__setDigit(e,t)}static __specialLeftShift(_,t,e){const g=_.length,n=new JSBI(g+e,!1);if(0===t){for(let t=0;t>>30-t}return 0t)throw new RangeError("BigInt too big");const e=0|t/30,n=t%30,g=_.length,o=0!==n&&0!=_.__digit(g-1)>>>30-n,s=g+e+(o?1:0),l=new JSBI(s,_.sign);if(0===n){let t=0;for(;t>>30-n}if(o)l.__setDigit(g+e,t);else if(0!==t)throw new Error("implementation bug")}return l.__trim()}static __rightShiftByAbsolute(_,i){const t=_.length,e=_.sign,n=JSBI.__toShiftAmount(i);if(0>n)return JSBI.__rightShiftByMaximum(e);const g=0|n/30,o=n%30;let s=t-g;if(0>=s)return JSBI.__rightShiftByMaximum(e);let l=!1;if(e){if(0!=(_.__digit(g)&(1<>>o;const n=t-g-1;for(let t=0;t>>o}r.__setDigit(n,e)}return l&&(r=JSBI.__absoluteAddOne(r,!0,r)),r.__trim()}static __rightShiftByMaximum(i){return i?JSBI.__oneDigit(1,!0):JSBI.__zero()}static __toShiftAmount(i){if(1JSBI.__kMaxLengthBits?-1:_}static __toPrimitive(i,_="default"){if("object"!=typeof i)return i;if(i.constructor===JSBI)return i;if("undefined"!=typeof Symbol&&"symbol"==typeof Symbol.toPrimitive){const t=i[Symbol.toPrimitive];if(t){const i=t(_);if("object"!=typeof i)return i;throw new TypeError("Cannot convert object to primitive value")}}const t=i.valueOf;if(t){const _=t.call(i);if("object"!=typeof _)return _}const e=i.toString;if(e){const _=e.call(i);if("object"!=typeof _)return _}throw new TypeError("Cannot convert object to primitive value")}static __toNumeric(i){return JSBI.__isBigInt(i)?i:+i}static __isBigInt(i){return"object"==typeof i&&null!==i&&i.constructor===JSBI}static __truncateToNBits(i,_){const t=0|(i+29)/30,e=new JSBI(t,_.sign),n=t-1;for(let t=0;t>>_}return e.__setDigit(n,g),e.__trim()}static __truncateAndSubFromPowerOfTwo(_,t,e){var n=Math.min;const g=0|(_+29)/30,o=new JSBI(g,e);let s=0;const l=g-1;let a=0;for(const i=n(l,t.length);s>>30,o.__setDigit(s,1073741823&i)}for(;s>>i;const _=1<<32-i;h=_-u-a,h&=_-1}return o.__setDigit(l,h),o.__trim()}__digit(_){return this[_]}__unsignedDigit(_){return this[_]>>>0}__setDigit(_,i){this[_]=0|i}__setDigitGrow(_,i){this[_]=0|i}__halfDigitLength(){const i=this.length;return 32767>=this.__unsignedDigit(i-1)?2*i-1:2*i}__halfDigit(_){return 32767&this[_>>>1]>>>15*(1&_)}__setHalfDigit(_,i){const t=_>>>1,e=this.__digit(t),n=1&_?32767&e|i<<15:1073709056&e|32767&i;this.__setDigit(t,n)}static __digitPow(i,_){let t=1;for(;0<_;)1&_&&(t*=i),_>>>=1,i*=i;return t}static 
__isOneDigitInt(i){return(1073741823&i)===i}}JSBI.__kMaxLength=33554432,JSBI.__kMaxLengthBits=JSBI.__kMaxLength<<5,JSBI.__kMaxBitsPerChar=[0,0,32,51,64,75,83,90,96,102,107,111,115,119,122,126,128,131,134,136,139,141,143,145,147,149,151,153,154,156,158,159,160,162,163,165,166],JSBI.__kBitsPerCharTableShift=5,JSBI.__kBitsPerCharTableMultiplier=1<>>0)/Math.LN2)},JSBI.__imul=Math.imul||function(i,_){return 0|i*_},module.exports=JSBI; +//# sourceMappingURL=jsbi-cjs.js.map /***/ }), -/***/ 75442: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 55031: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var json_stringify = (__nccwpck_require__(78574).stringify); +var json_parse = __nccwpck_require__(89099); -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(84213), exports); -tslib_1.__exportStar(__nccwpck_require__(34073), exports); -tslib_1.__exportStar(__nccwpck_require__(72533), exports); -tslib_1.__exportStar(__nccwpck_require__(63135), exports); -tslib_1.__exportStar(__nccwpck_require__(19136), exports); -tslib_1.__exportStar(__nccwpck_require__(28344), exports); -tslib_1.__exportStar(__nccwpck_require__(42535), exports); +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify + } +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; /***/ }), -/***/ 42535: -/***/ ((__unused_webpack_module, exports) => { +/***/ 89099: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var BigNumber = null; -Object.defineProperty(exports, "__esModule", ({ value: true })); +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; -/***/ }), +/* + json_parse.js + 2012-06-20 -/***/ 66318: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + Public Domain. -"use strict"; + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.callFunction = void 0; -const customEndpointFunctions_1 = __nccwpck_require__(38824); -const endpointFunctions_1 = __nccwpck_require__(70953); -const evaluateExpression_1 = __nccwpck_require__(91692); -const callFunction = ({ fn, argv }, options) => { - const evaluatedArgs = argv.map((arg) => ["boolean", "number"].includes(typeof arg) ? arg : (0, evaluateExpression_1.evaluateExpression)(arg, "arg", options)); - const fnSegments = fn.split("."); - if (fnSegments[0] in customEndpointFunctions_1.customEndpointFunctions && fnSegments[1] != null) { - return customEndpointFunctions_1.customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); - } - return endpointFunctions_1.endpointFunctions[fn](...evaluatedArgs); -}; -exports.callFunction = callFunction; + This file creates a json_parse function. 
+ During create you can (optionally) specify some behavioural switches + require('json-bigint')(options) -/***/ }), + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. + The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. -/***/ 38824: -/***/ ((__unused_webpack_module, exports) => { + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. -"use strict"; + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.customEndpointFunctions = void 0; -exports.customEndpointFunctions = {}; + Example: + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. -/***/ }), + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); -/***/ 70953: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + This is a reference implementation. You are free to copy, modify, or + redistribute. -"use strict"; + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.endpointFunctions = void 0; -const lib_1 = __nccwpck_require__(36559); -exports.endpointFunctions = { - booleanEquals: lib_1.booleanEquals, - getAttr: lib_1.getAttr, - isSet: lib_1.isSet, - isValidHostLabel: lib_1.isValidHostLabel, - not: lib_1.not, - parseURL: lib_1.parseURL, - stringEquals: lib_1.stringEquals, - substring: lib_1.substring, - uriEncode: lib_1.uriEncode, -}; + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. +*/ +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ -/***/ }), +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. 
+ + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; -/***/ 42138: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; + } + if (options.storeAsString === true) { + _options.storeAsString = true; + } + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? options.useNativeBigInt : false; -"use strict"; + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateCondition = void 0; -const debug_1 = __nccwpck_require__(30540); -const types_1 = __nccwpck_require__(75442); -const callFunction_1 = __nccwpck_require__(66318); -const evaluateCondition = ({ assign, ...fnArgs }, options) => { - var _a, _b; - if (assign && assign in options.referenceRecord) { - throw new types_1.EndpointError(`'${assign}' is already defined in Reference Record.`); + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } } - const value = (0, callFunction_1.callFunction)(fnArgs, options); - (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, debug_1.debugId, `evaluateCondition: ${(0, debug_1.toDebugString)(fnArgs)} = ${(0, debug_1.toDebugString)(value)}`); - return { - result: value === "" ? true : !!value, - ...(assign != null && { toAssign: { name: assign, value } }), - }; -}; -exports.evaluateCondition = evaluateCondition; + } + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. + + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. -/***/ }), + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } -/***/ 69584: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // Get the next character. 
When there are no more characters, + // return the empty string. -"use strict"; + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateConditions = void 0; -const debug_1 = __nccwpck_require__(30540); -const evaluateCondition_1 = __nccwpck_require__(42138); -const evaluateConditions = (conditions = [], options) => { - var _a, _b; - const conditionsReferenceRecord = {}; - for (const condition of conditions) { - const { result, toAssign } = (0, evaluateCondition_1.evaluateCondition)(condition, { - ...options, - referenceRecord: { - ...options.referenceRecord, - ...conditionsReferenceRecord, - }, - }); - if (!result) { - return { result }; + var number, + string = ''; + + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; } - if (toAssign) { - conditionsReferenceRecord[toAssign.name] = toAssign.value; - (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, debug_1.debugId, `assign: ${toAssign.name} := ${(0, debug_1.toDebugString)(toAssign.value)}`); + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); } - } - return { result: true, referenceRecord: conditionsReferenceRecord }; -}; -exports.evaluateConditions = evaluateConditions; - - -/***/ }), + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = __nccwpck_require__(87558); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. + + var hex, + i, + string = '', + uffff; + + // When parsing for string values, we must look for " and \ characters. + + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; + } + startAt = at; + } + } + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. -/***/ 14405: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. 
+ + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. + + var array = []; + + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array + } + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); + } + } + error('Bad array'); + }, + object = function () { + // Parse an object value. + + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object + } + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } -"use strict"; + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else { + object[key] = value(); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateEndpointRule = void 0; -const debug_1 = __nccwpck_require__(30540); -const evaluateConditions_1 = __nccwpck_require__(69584); -const getEndpointHeaders_1 = __nccwpck_require__(57225); -const getEndpointProperties_1 = __nccwpck_require__(83067); -const getEndpointUrl_1 = __nccwpck_require__(25672); -const evaluateEndpointRule = (endpointRule, options) => { - var _a, _b; - const { conditions, endpoint } = endpointRule; - const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); - if (!result) { - return; - } - const endpointRuleOptions = { - ...options, - referenceRecord: { ...options.referenceRecord, ...referenceRecord }, - }; - const { url, properties, headers } = endpoint; - (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, debug_1.debugId, `Resolving endpoint from template: ${(0, debug_1.toDebugString)(endpoint)}`); - return { - ...(headers != undefined && { - headers: (0, getEndpointHeaders_1.getEndpointHeaders)(headers, endpointRuleOptions), - }), - ...(properties != undefined && { - properties: (0, getEndpointProperties_1.getEndpointProperties)(properties, endpointRuleOptions), - }), - url: (0, getEndpointUrl_1.getEndpointUrl)(url, endpointRuleOptions), + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); + } + } + error('Bad object'); }; -}; -exports.evaluateEndpointRule = evaluateEndpointRule; + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? 
number() : word(); + } + }; -/***/ }), - -/***/ 57563: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // Return the json_parse function. It will have access to all of the above + // functions and variables. -"use strict"; + return function (source, reviver) { + var result; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateErrorRule = void 0; -const types_1 = __nccwpck_require__(75442); -const evaluateConditions_1 = __nccwpck_require__(69584); -const evaluateExpression_1 = __nccwpck_require__(91692); -const evaluateErrorRule = (errorRule, options) => { - const { conditions, error } = errorRule; - const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); - if (!result) { - return; - } - throw new types_1.EndpointError((0, evaluateExpression_1.evaluateExpression)(error, "Error", { - ...options, - referenceRecord: { ...options.referenceRecord, ...referenceRecord }, - })); + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); + } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. + + return typeof reviver === 'function' + ? (function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; }; -exports.evaluateErrorRule = evaluateErrorRule; +module.exports = json_parse; -/***/ }), - -/***/ 91692: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; +/***/ }), -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateExpression = void 0; -const types_1 = __nccwpck_require__(75442); -const callFunction_1 = __nccwpck_require__(66318); -const evaluateTemplate_1 = __nccwpck_require__(21922); -const getReferenceValue_1 = __nccwpck_require__(17142); -const evaluateExpression = (obj, keyName, options) => { - if (typeof obj === "string") { - return (0, evaluateTemplate_1.evaluateTemplate)(obj, options); - } - else if (obj["fn"]) { - return (0, callFunction_1.callFunction)(obj, options); - } - else if (obj["ref"]) { - return (0, getReferenceValue_1.getReferenceValue)(obj, options); - } - throw new types_1.EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); -}; -exports.evaluateExpression = evaluateExpression; +/***/ 78574: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var BigNumber = __nccwpck_require__(87558); -/***/ }), +/* + json2.js + 2013-05-26 -/***/ 48830: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + Public Domain. -"use strict"; + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateRules = void 0; -const types_1 = __nccwpck_require__(75442); -const evaluateEndpointRule_1 = __nccwpck_require__(14405); -const evaluateErrorRule_1 = __nccwpck_require__(57563); -const evaluateTreeRule_1 = __nccwpck_require__(55085); -const evaluateRules = (rules, options) => { - for (const rule of rules) { - if (rule.type === "endpoint") { - const endpointOrUndefined = (0, evaluateEndpointRule_1.evaluateEndpointRule)(rule, options); - if (endpointOrUndefined) { - return endpointOrUndefined; - } - } - else if (rule.type === "error") { - (0, evaluateErrorRule_1.evaluateErrorRule)(rule, options); - } - else if (rule.type === "tree") { - const endpointOrUndefined = (0, evaluateTreeRule_1.evaluateTreeRule)(rule, options); - if (endpointOrUndefined) { - return endpointOrUndefined; - } - } - else { - throw new types_1.EndpointError(`Unknown endpoint rule: ${rule}`); - } - } - throw new types_1.EndpointError(`Rules evaluation failed`); -}; -exports.evaluateRules = evaluateRules; + See http://www.JSON.org/js.html -/***/ }), + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html -/***/ 21922: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. -"use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateTemplate = void 0; -const lib_1 = __nccwpck_require__(36559); -const evaluateTemplate = (template, options) => { - const evaluatedTemplateArr = []; - const templateContext = { - ...options.endpointParams, - ...options.referenceRecord, - }; - let currentIndex = 0; - while (currentIndex < template.length) { - const openingBraceIndex = template.indexOf("{", currentIndex); - if (openingBraceIndex === -1) { - evaluatedTemplateArr.push(template.slice(currentIndex)); - break; - } - evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); - const closingBraceIndex = template.indexOf("}", openingBraceIndex); - if (closingBraceIndex === -1) { - evaluatedTemplateArr.push(template.slice(openingBraceIndex)); - break; - } - if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { - evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); - currentIndex = closingBraceIndex + 2; - } - const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); - if (parameterName.includes("#")) { - const [refName, attrName] = parameterName.split("#"); - evaluatedTemplateArr.push((0, lib_1.getAttr)(templateContext[refName], attrName)); - } - else { - evaluatedTemplateArr.push(templateContext[parameterName]); - } - currentIndex = closingBraceIndex + 1; - } - return evaluatedTemplateArr.join(""); -}; -exports.evaluateTemplate = evaluateTemplate; + This file creates a global JSON object containing two methods: stringify + and parse. + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. -/***/ }), + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. -/***/ 55085: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. 
If it is a number, + it will specify the number of spaces to indent at each + level. If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. -"use strict"; + This method produces a JSON text from a JavaScript value. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateTreeRule = void 0; -const evaluateConditions_1 = __nccwpck_require__(69584); -const evaluateRules_1 = __nccwpck_require__(48830); -const evaluateTreeRule = (treeRule, options) => { - const { conditions, rules } = treeRule; - const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); - if (!result) { - return; - } - return (0, evaluateRules_1.evaluateRules)(rules, { - ...options, - referenceRecord: { ...options.referenceRecord, ...referenceRecord }, - }); -}; -exports.evaluateTreeRule = evaluateTreeRule; + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + For example, this would serialize Dates as ISO strings. -/***/ }), + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } -/***/ 57225: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; -"use strict"; + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEndpointHeaders = void 0; -const types_1 = __nccwpck_require__(75442); -const evaluateExpression_1 = __nccwpck_require__(91692); -const getEndpointHeaders = (headers, options) => Object.entries(headers).reduce((acc, [headerKey, headerVal]) => ({ - ...acc, - [headerKey]: headerVal.map((headerValEntry) => { - const processedExpr = (0, evaluateExpression_1.evaluateExpression)(headerValEntry, "Header value entry", options); - if (typeof processedExpr !== "string") { - throw new types_1.EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); - } - return processedExpr; - }), -}), {}); -exports.getEndpointHeaders = getEndpointHeaders; + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. 
-/***/ }), + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. -/***/ 83067: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. -"use strict"; + Example: -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEndpointProperties = void 0; -const getEndpointProperty_1 = __nccwpck_require__(26152); -const getEndpointProperties = (properties, options) => Object.entries(properties).reduce((acc, [propertyKey, propertyVal]) => ({ - ...acc, - [propertyKey]: (0, getEndpointProperty_1.getEndpointProperty)(propertyVal, options), -}), {}); -exports.getEndpointProperties = getEndpointProperties; + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' -/***/ }), + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' -/***/ 26152: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' -"use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEndpointProperty = void 0; -const types_1 = __nccwpck_require__(75442); -const evaluateTemplate_1 = __nccwpck_require__(21922); -const getEndpointProperties_1 = __nccwpck_require__(83067); -const getEndpointProperty = (property, options) => { - if (Array.isArray(property)) { - return property.map((propertyEntry) => (0, exports.getEndpointProperty)(propertyEntry, options)); - } - switch (typeof property) { - case "string": - return (0, evaluateTemplate_1.evaluateTemplate)(property, options); - case "object": - if (property === null) { - throw new types_1.EndpointError(`Unexpected endpoint property: ${property}`); - } - return (0, getEndpointProperties_1.getEndpointProperties)(property, options); - case "boolean": - return property; - default: - throw new types_1.EndpointError(`Unexpected endpoint property type: ${typeof property}`); - } -}; -exports.getEndpointProperty = getEndpointProperty; + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. -/***/ }), + Example: -/***/ 25672: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. 
-"use strict"; + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEndpointUrl = void 0; -const types_1 = __nccwpck_require__(75442); -const evaluateExpression_1 = __nccwpck_require__(91692); -const getEndpointUrl = (endpointUrl, options) => { - const expression = (0, evaluateExpression_1.evaluateExpression)(endpointUrl, "Endpoint URL", options); - if (typeof expression === "string") { - try { - return new URL(expression); - } - catch (error) { - console.error(`Failed to construct URL with ${expression}`, error); - throw error; - } - } - throw new types_1.EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); -}; -exports.getEndpointUrl = getEndpointUrl; + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); -/***/ }), + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ -/***/ 17142: -/***/ ((__unused_webpack_module, exports) => { +/*jslint evil: true, regexp: true */ -"use strict"; +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getReferenceValue = void 0; -const getReferenceValue = ({ ref }, options) => { - const referenceRecord = { - ...options.endpointParams, - ...options.referenceRecord, - }; - return referenceRecord[ref]; -}; -exports.getReferenceValue = getReferenceValue; +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. -/***/ }), +var JSON = module.exports; -/***/ 96871: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; -"use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(38824), exports); -tslib_1.__exportStar(__nccwpck_require__(48830), exports); + function quote(string) { +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. -/***/ }), + escapable.lastIndex = 0; + return escapable.test(string) ? 
'"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } -/***/ 45364: -/***/ ((__unused_webpack_module, exports) => { -"use strict"; + function str(key, holder) { -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toHex = exports.fromHex = void 0; -const SHORT_TO_HEX = {}; -const HEX_TO_SHORT = {}; -for (let i = 0; i < 256; i++) { - let encodedByte = i.toString(16).toLowerCase(); - if (encodedByte.length === 1) { - encodedByte = `0${encodedByte}`; - } - SHORT_TO_HEX[i] = encodedByte; - HEX_TO_SHORT[encodedByte] = i; -} -function fromHex(encoded) { - if (encoded.length % 2 !== 0) { - throw new Error("Hex encoded strings must have an even number length"); - } - const out = new Uint8Array(encoded.length / 2); - for (let i = 0; i < encoded.length; i += 2) { - const encodedByte = encoded.slice(i, i + 2).toLowerCase(); - if (encodedByte in HEX_TO_SHORT) { - out[i / 2] = HEX_TO_SHORT[encodedByte]; - } - else { - throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); - } - } - return out; -} -exports.fromHex = fromHex; -function toHex(bytes) { - let out = ""; - for (let i = 0; i < bytes.byteLength; i++) { - out += SHORT_TO_HEX[bytes[i]]; - } - return out; -} -exports.toHex = toHex; +// Produce a string from holder[key]. + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); -/***/ }), +// If the value has a toJSON method, call it to obtain a replacement value. -/***/ 85730: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } -"use strict"; +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSmithyContext = void 0; -const types_1 = __nccwpck_require__(55756); -const getSmithyContext = (context) => context[types_1.SMITHY_CONTEXT_KEY] || (context[types_1.SMITHY_CONTEXT_KEY] = {}); -exports.getSmithyContext = getSmithyContext; + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } +// What happens next depends on the value's type. -/***/ }), + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); + } -/***/ 2390: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + case 'number': -"use strict"; +// JSON numbers must be finite. Encode non-finite numbers as null. -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(85730), exports); -tslib_1.__exportStar(__nccwpck_require__(80149), exports); + return isFinite(value) ? String(value) : 'null'; + case 'boolean': + case 'null': + case 'bigint': -/***/ }), +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. -/***/ 80149: -/***/ ((__unused_webpack_module, exports) => { + return String(value); -"use strict"; +// If the type is 'object', we might be dealing with an object or an array or +// null. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.normalizeProvider = void 0; -const normalizeProvider = (input) => { - if (typeof input === "function") - return input; - const promisified = Promise.resolve(input); - return () => promisified; -}; -exports.normalizeProvider = normalizeProvider; + case 'object': +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. -/***/ }), + if (!value) { + return 'null'; + } -/***/ 65053: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// Make an array to hold the partial results of stringifying this object value. -"use strict"; + gap += indent; + partial = []; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AdaptiveRetryStrategy = void 0; -const config_1 = __nccwpck_require__(93435); -const DefaultRateLimiter_1 = __nccwpck_require__(22234); -const StandardRetryStrategy_1 = __nccwpck_require__(48361); -class AdaptiveRetryStrategy { - constructor(maxAttemptsProvider, options) { - this.maxAttemptsProvider = maxAttemptsProvider; - this.mode = config_1.RETRY_MODES.ADAPTIVE; - const { rateLimiter } = options !== null && options !== void 0 ? options : {}; - this.rateLimiter = rateLimiter !== null && rateLimiter !== void 0 ? rateLimiter : new DefaultRateLimiter_1.DefaultRateLimiter(); - this.standardRetryStrategy = new StandardRetryStrategy_1.StandardRetryStrategy(maxAttemptsProvider); - } - async acquireInitialRetryToken(retryTokenScope) { - await this.rateLimiter.getSendToken(); - return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); - } - async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { - this.rateLimiter.updateClientSendingRate(errorInfo); - return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); - } - recordSuccess(token) { - this.rateLimiter.updateClientSendingRate({}); - this.standardRetryStrategy.recordSuccess(token); - } -} -exports.AdaptiveRetryStrategy = AdaptiveRetryStrategy; +// Is the value an array? + if (Object.prototype.toString.apply(value) === '[object Array]') { -/***/ }), +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. -/***/ 25689: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } -"use strict"; +// Join all of the elements together, separated with commas, and wrap them in +// brackets. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ConfiguredRetryStrategy = void 0; -const constants_1 = __nccwpck_require__(66302); -const StandardRetryStrategy_1 = __nccwpck_require__(48361); -class ConfiguredRetryStrategy extends StandardRetryStrategy_1.StandardRetryStrategy { - constructor(maxAttempts, computeNextBackoffDelay = constants_1.DEFAULT_RETRY_DELAY_BASE) { - super(typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts); - if (typeof computeNextBackoffDelay === "number") { - this.computeNextBackoffDelay = () => computeNextBackoffDelay; - } - else { - this.computeNextBackoffDelay = computeNextBackoffDelay; - } - } - async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { - const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); - token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); - return token; - } -} -exports.ConfiguredRetryStrategy = ConfiguredRetryStrategy; + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } +// If the replacer is an array, use it to select the members to be stringified. -/***/ }), + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { -/***/ 22234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// Otherwise, iterate through all of the keys in the object. -"use strict"; + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultRateLimiter = void 0; -const service_error_classification_1 = __nccwpck_require__(6375); -class DefaultRateLimiter { - constructor(options) { - var _a, _b, _c, _d, _e; - this.currentCapacity = 0; - this.enabled = false; - this.lastMaxRate = 0; - this.measuredTxRate = 0; - this.requestCount = 0; - this.lastTimestamp = 0; - this.timeWindow = 0; - this.beta = (_a = options === null || options === void 0 ? void 0 : options.beta) !== null && _a !== void 0 ? _a : 0.7; - this.minCapacity = (_b = options === null || options === void 0 ? void 0 : options.minCapacity) !== null && _b !== void 0 ? _b : 1; - this.minFillRate = (_c = options === null || options === void 0 ? void 0 : options.minFillRate) !== null && _c !== void 0 ? _c : 0.5; - this.scaleConstant = (_d = options === null || options === void 0 ? void 0 : options.scaleConstant) !== null && _d !== void 0 ? _d : 0.4; - this.smooth = (_e = options === null || options === void 0 ? void 0 : options.smooth) !== null && _e !== void 0 ? 
_e : 0.8; - const currentTimeInSeconds = this.getCurrentTimeInSeconds(); - this.lastThrottleTime = currentTimeInSeconds; - this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); - this.fillRate = this.minFillRate; - this.maxCapacity = this.minCapacity; - } - getCurrentTimeInSeconds() { - return Date.now() / 1000; - } - async getSendToken() { - return this.acquireTokenBucket(1); - } - async acquireTokenBucket(amount) { - if (!this.enabled) { - return; - } - this.refillTokenBucket(); - if (amount > this.currentCapacity) { - const delay = ((amount - this.currentCapacity) / this.fillRate) * 1000; - await new Promise((resolve) => setTimeout(resolve, delay)); - } - this.currentCapacity = this.currentCapacity - amount; - } - refillTokenBucket() { - const timestamp = this.getCurrentTimeInSeconds(); - if (!this.lastTimestamp) { - this.lastTimestamp = timestamp; - return; - } - const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; - this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); - this.lastTimestamp = timestamp; - } - updateClientSendingRate(response) { - let calculatedRate; - this.updateMeasuredRate(); - if ((0, service_error_classification_1.isThrottlingError)(response)) { - const rateToUse = !this.enabled ? this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); - this.lastMaxRate = rateToUse; - this.calculateTimeWindow(); - this.lastThrottleTime = this.getCurrentTimeInSeconds(); - calculatedRate = this.cubicThrottle(rateToUse); - this.enableTokenBucket(); - } - else { - this.calculateTimeWindow(); - calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); - } - const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); - this.updateTokenBucketRate(newRate); - } - calculateTimeWindow() { - this.timeWindow = this.getPrecise(Math.pow((this.lastMaxRate * (1 - this.beta)) / this.scaleConstant, 1 / 3)); - } - cubicThrottle(rateToUse) { - return this.getPrecise(rateToUse * this.beta); - } - cubicSuccess(timestamp) { - return this.getPrecise(this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate); - } - enableTokenBucket() { - this.enabled = true; - } - updateTokenBucketRate(newRate) { - this.refillTokenBucket(); - this.fillRate = Math.max(newRate, this.minFillRate); - this.maxCapacity = Math.max(newRate, this.minCapacity); - this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); - } - updateMeasuredRate() { - const t = this.getCurrentTimeInSeconds(); - const timeBucket = Math.floor(t * 2) / 2; - this.requestCount++; - if (timeBucket > this.lastTxRateBucket) { - const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); - this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); - this.requestCount = 0; - this.lastTxRateBucket = timeBucket; +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; } } - getPrecise(num) { - return parseFloat(num.toFixed(8)); + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. 
The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. + + return str('', {'': value}); + }; } -} -exports.DefaultRateLimiter = DefaultRateLimiter; +}()); /***/ }), -/***/ 48361: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 53359: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var jws = __nccwpck_require__(22597); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.StandardRetryStrategy = void 0; -const config_1 = __nccwpck_require__(93435); -const constants_1 = __nccwpck_require__(66302); -const defaultRetryBackoffStrategy_1 = __nccwpck_require__(21337); -const defaultRetryToken_1 = __nccwpck_require__(1127); -class StandardRetryStrategy { - constructor(maxAttempts) { - this.maxAttempts = maxAttempts; - this.mode = config_1.RETRY_MODES.STANDARD; - this.capacity = constants_1.INITIAL_RETRY_TOKENS; - this.retryBackoffStrategy = (0, defaultRetryBackoffStrategy_1.getDefaultRetryBackoffStrategy)(); - this.maxAttemptsProvider = typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts; - } - async acquireInitialRetryToken(retryTokenScope) { - return (0, defaultRetryToken_1.createDefaultRetryToken)({ - retryDelay: constants_1.DEFAULT_RETRY_DELAY_BASE, - retryCount: 0, - }); - } - async refreshRetryTokenForRetry(token, errorInfo) { - const maxAttempts = await this.getMaxAttempts(); - if (this.shouldRetry(token, errorInfo, maxAttempts)) { - const errorType = errorInfo.errorType; - this.retryBackoffStrategy.setDelayBase(errorType === "THROTTLING" ? constants_1.THROTTLING_RETRY_DELAY_BASE : constants_1.DEFAULT_RETRY_DELAY_BASE); - const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); - const retryDelay = errorInfo.retryAfterHint - ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) - : delayFromErrorType; - const capacityCost = this.getCapacityCost(errorType); - this.capacity -= capacityCost; - return (0, defaultRetryToken_1.createDefaultRetryToken)({ - retryDelay, - retryCount: token.getRetryCount() + 1, - retryCost: capacityCost, - }); - } - throw new Error("No retry token available"); - } - recordSuccess(token) { - var _a; - this.capacity = Math.max(constants_1.INITIAL_RETRY_TOKENS, this.capacity + ((_a = token.getRetryCost()) !== null && _a !== void 0 ? 
_a : constants_1.NO_RETRY_INCREMENT)); - } - getCapacity() { - return this.capacity; - } - async getMaxAttempts() { - try { - return await this.maxAttemptsProvider(); - } - catch (error) { - console.warn(`Max attempts provider could not resolve. Using default of ${config_1.DEFAULT_MAX_ATTEMPTS}`); - return config_1.DEFAULT_MAX_ATTEMPTS; - } - } - shouldRetry(tokenToRenew, errorInfo, maxAttempts) { - const attempts = tokenToRenew.getRetryCount() + 1; - return (attempts < maxAttempts && - this.capacity >= this.getCapacityCost(errorInfo.errorType) && - this.isRetryableError(errorInfo.errorType)); - } - getCapacityCost(errorType) { - return errorType === "TRANSIENT" ? constants_1.TIMEOUT_RETRY_COST : constants_1.RETRY_COST; - } - isRetryableError(errorType) { - return errorType === "THROTTLING" || errorType === "TRANSIENT"; - } -} -exports.StandardRetryStrategy = StandardRetryStrategy; +module.exports = function (jwt, options) { + options = options || {}; + var decoded = jws.decode(jwt, options); + if (!decoded) { return null; } + var payload = decoded.payload; + + //try parse the payload + if(typeof payload === 'string') { + try { + var obj = JSON.parse(payload); + if(obj !== null && typeof obj === 'object') { + payload = obj; + } + } catch (e) { } + } + + //return header if `complete` option is enabled. header includes claims + //such as `kid` and `alg` used to select the key within a JWKS needed to + //verify the signature + if (options.complete === true) { + return { + header: decoded.header, + payload: payload, + signature: decoded.signature + }; + } + return payload; +}; /***/ }), -/***/ 93435: -/***/ ((__unused_webpack_module, exports) => { +/***/ 77486: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +module.exports = { + verify: __nccwpck_require__(12327), + sign: __nccwpck_require__(82022), + JsonWebTokenError: __nccwpck_require__(405), + NotBeforeError: __nccwpck_require__(4383), + TokenExpiredError: __nccwpck_require__(46637), +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DEFAULT_RETRY_MODE = exports.DEFAULT_MAX_ATTEMPTS = exports.RETRY_MODES = void 0; -var RETRY_MODES; -(function (RETRY_MODES) { - RETRY_MODES["STANDARD"] = "standard"; - RETRY_MODES["ADAPTIVE"] = "adaptive"; -})(RETRY_MODES = exports.RETRY_MODES || (exports.RETRY_MODES = {})); -exports.DEFAULT_MAX_ATTEMPTS = 3; -exports.DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; +Object.defineProperty(module.exports, "decode", ({ + enumerable: false, + value: __nccwpck_require__(53359), +})); /***/ }), -/***/ 66302: -/***/ ((__unused_webpack_module, exports) => { +/***/ 405: +/***/ ((module) => { -"use strict"; +var JsonWebTokenError = function (message, error) { + Error.call(this, message); + if(Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = 'JsonWebTokenError'; + this.message = message; + if (error) this.inner = error; +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.REQUEST_HEADER = exports.INVOCATION_ID_HEADER = exports.NO_RETRY_INCREMENT = exports.TIMEOUT_RETRY_COST = exports.RETRY_COST = exports.INITIAL_RETRY_TOKENS = exports.THROTTLING_RETRY_DELAY_BASE = exports.MAXIMUM_RETRY_DELAY = exports.DEFAULT_RETRY_DELAY_BASE = void 0; -exports.DEFAULT_RETRY_DELAY_BASE = 100; -exports.MAXIMUM_RETRY_DELAY = 20 * 1000; -exports.THROTTLING_RETRY_DELAY_BASE = 500; -exports.INITIAL_RETRY_TOKENS = 500; -exports.RETRY_COST = 5; -exports.TIMEOUT_RETRY_COST = 10; -exports.NO_RETRY_INCREMENT = 1; 
-exports.INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; -exports.REQUEST_HEADER = "amz-sdk-request"; +JsonWebTokenError.prototype = Object.create(Error.prototype); +JsonWebTokenError.prototype.constructor = JsonWebTokenError; + +module.exports = JsonWebTokenError; /***/ }), -/***/ 21337: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 4383: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var JsonWebTokenError = __nccwpck_require__(405); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getDefaultRetryBackoffStrategy = void 0; -const constants_1 = __nccwpck_require__(66302); -const getDefaultRetryBackoffStrategy = () => { - let delayBase = constants_1.DEFAULT_RETRY_DELAY_BASE; - const computeNextBackoffDelay = (attempts) => { - return Math.floor(Math.min(constants_1.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); - }; - const setDelayBase = (delay) => { - delayBase = delay; - }; - return { - computeNextBackoffDelay, - setDelayBase, - }; +var NotBeforeError = function (message, date) { + JsonWebTokenError.call(this, message); + this.name = 'NotBeforeError'; + this.date = date; }; -exports.getDefaultRetryBackoffStrategy = getDefaultRetryBackoffStrategy; +NotBeforeError.prototype = Object.create(JsonWebTokenError.prototype); + +NotBeforeError.prototype.constructor = NotBeforeError; + +module.exports = NotBeforeError; /***/ }), -/***/ 1127: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 46637: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var JsonWebTokenError = __nccwpck_require__(405); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createDefaultRetryToken = void 0; -const constants_1 = __nccwpck_require__(66302); -const createDefaultRetryToken = ({ retryDelay, retryCount, retryCost, }) => { - const getRetryCount = () => retryCount; - const getRetryDelay = () => Math.min(constants_1.MAXIMUM_RETRY_DELAY, retryDelay); - const getRetryCost = () => retryCost; - return { - getRetryCount, - getRetryDelay, - getRetryCost, - }; +var TokenExpiredError = function (message, expiredAt) { + JsonWebTokenError.call(this, message); + this.name = 'TokenExpiredError'; + this.expiredAt = expiredAt; }; -exports.createDefaultRetryToken = createDefaultRetryToken; +TokenExpiredError.prototype = Object.create(JsonWebTokenError.prototype); + +TokenExpiredError.prototype.constructor = TokenExpiredError; + +module.exports = TokenExpiredError; /***/ }), -/***/ 84902: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7622: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +const semver = __nccwpck_require__(11383); -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(65053), exports); -tslib_1.__exportStar(__nccwpck_require__(25689), exports); -tslib_1.__exportStar(__nccwpck_require__(22234), exports); -tslib_1.__exportStar(__nccwpck_require__(48361), exports); -tslib_1.__exportStar(__nccwpck_require__(93435), exports); -tslib_1.__exportStar(__nccwpck_require__(66302), exports); -tslib_1.__exportStar(__nccwpck_require__(75427), exports); +module.exports = semver.satisfies(process.version, '>=15.7.0'); /***/ }), -/***/ 75427: -/***/ ((__unused_webpack_module, exports) => { +/***/ 59085: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use 
strict"; +var semver = __nccwpck_require__(11383); -Object.defineProperty(exports, "__esModule", ({ value: true })); +module.exports = semver.satisfies(process.version, '^6.12.0 || >=8.0.0'); /***/ }), -/***/ 22094: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 45170: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +const semver = __nccwpck_require__(11383); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Uint8ArrayBlobAdapter = void 0; -const transforms_1 = __nccwpck_require__(82098); -class Uint8ArrayBlobAdapter extends Uint8Array { - static fromString(source, encoding = "utf-8") { - switch (typeof source) { - case "string": - return (0, transforms_1.transformFromString)(source, encoding); - default: - throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); - } - } - static mutate(source) { - Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype); - return source; - } - transformToString(encoding = "utf-8") { - return (0, transforms_1.transformToString)(this, encoding); - } -} -exports.Uint8ArrayBlobAdapter = Uint8ArrayBlobAdapter; +module.exports = semver.satisfies(process.version, '>=16.9.0'); /***/ }), -/***/ 82098: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 20910: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var ms = __nccwpck_require__(80900); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.transformFromString = exports.transformToString = void 0; -const util_base64_1 = __nccwpck_require__(75600); -const util_utf8_1 = __nccwpck_require__(41895); -const Uint8ArrayBlobAdapter_1 = __nccwpck_require__(22094); -function transformToString(payload, encoding = "utf-8") { - if (encoding === "base64") { - return (0, util_base64_1.toBase64)(payload); - } - return (0, util_utf8_1.toUtf8)(payload); -} -exports.transformToString = transformToString; -function transformFromString(str, encoding) { - if (encoding === "base64") { - return Uint8ArrayBlobAdapter_1.Uint8ArrayBlobAdapter.mutate((0, util_base64_1.fromBase64)(str)); +module.exports = function (time, iat) { + var timestamp = iat || Math.floor(Date.now() / 1000); + + if (typeof time === 'string') { + var milliseconds = ms(time); + if (typeof milliseconds === 'undefined') { + return; } - return Uint8ArrayBlobAdapter_1.Uint8ArrayBlobAdapter.mutate((0, util_utf8_1.fromUtf8)(str)); -} -exports.transformFromString = transformFromString; + return Math.floor(timestamp + milliseconds / 1000); + } else if (typeof time === 'number') { + return timestamp + time; + } else { + return; + } +}; /***/ }), -/***/ 23636: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 47596: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +const ASYMMETRIC_KEY_DETAILS_SUPPORTED = __nccwpck_require__(7622); +const RSA_PSS_KEY_DETAILS_SUPPORTED = __nccwpck_require__(45170); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getAwsChunkedEncodingStream = void 0; -const stream_1 = __nccwpck_require__(12781); -const getAwsChunkedEncodingStream = (readableStream, options) => { - const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; - const checksumRequired = base64Encoder !== undefined && - checksumAlgorithmFn !== undefined && - checksumLocationName !== undefined && - streamHasher !== undefined; - const 
digest = checksumRequired ? streamHasher(checksumAlgorithmFn, readableStream) : undefined; - const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); - readableStream.on("data", (data) => { - const length = bodyLengthChecker(data) || 0; - awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); - awsChunkedEncodingStream.push(data); - awsChunkedEncodingStream.push("\r\n"); - }); - readableStream.on("end", async () => { - awsChunkedEncodingStream.push(`0\r\n`); - if (checksumRequired) { - const checksum = base64Encoder(await digest); - awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); - awsChunkedEncodingStream.push(`\r\n`); - } - awsChunkedEncodingStream.push(null); - }); - return awsChunkedEncodingStream; +const allowedAlgorithmsForKeys = { + 'ec': ['ES256', 'ES384', 'ES512'], + 'rsa': ['RS256', 'PS256', 'RS384', 'PS384', 'RS512', 'PS512'], + 'rsa-pss': ['PS256', 'PS384', 'PS512'] }; -exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; +const allowedCurves = { + ES256: 'prime256v1', + ES384: 'secp384r1', + ES512: 'secp521r1', +}; -/***/ }), +module.exports = function(algorithm, key) { + if (!algorithm || !key) return; -/***/ 96607: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + const keyType = key.asymmetricKeyType; + if (!keyType) return; -"use strict"; + const allowedAlgorithms = allowedAlgorithmsForKeys[keyType]; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(22094), exports); -tslib_1.__exportStar(__nccwpck_require__(23636), exports); -tslib_1.__exportStar(__nccwpck_require__(4515), exports); + if (!allowedAlgorithms) { + throw new Error(`Unknown key type "${keyType}".`); + } + if (!allowedAlgorithms.includes(algorithm)) { + throw new Error(`"alg" parameter for "${keyType}" key type must be one of: ${allowedAlgorithms.join(', ')}.`) + } -/***/ }), + /* + * Ignore the next block from test coverage because it gets executed + * conditionally depending on the Node version. Not ignoring it would + * prevent us from reaching the target % of coverage for versions of + * Node under 15.7.0. + */ + /* istanbul ignore next */ + if (ASYMMETRIC_KEY_DETAILS_SUPPORTED) { + switch (keyType) { + case 'ec': + const keyCurve = key.asymmetricKeyDetails.namedCurve; + const allowedCurve = allowedCurves[algorithm]; -/***/ 4515: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (keyCurve !== allowedCurve) { + throw new Error(`"alg" parameter "${algorithm}" requires curve "${allowedCurve}".`); + } + break; -"use strict"; + case 'rsa-pss': + if (RSA_PSS_KEY_DETAILS_SUPPORTED) { + const length = parseInt(algorithm.slice(-3), 10); + const { hashAlgorithm, mgf1HashAlgorithm, saltLength } = key.asymmetricKeyDetails; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.sdkStreamMixin = void 0; -const node_http_handler_1 = __nccwpck_require__(20258); -const util_buffer_from_1 = __nccwpck_require__(31381); -const stream_1 = __nccwpck_require__(12781); -const util_1 = __nccwpck_require__(73837); -const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; -const sdkStreamMixin = (stream) => { - var _a, _b; - if (!(stream instanceof stream_1.Readable)) { - const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? 
void 0 : _b.name) || stream; - throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); - } - let transformed = false; - const transformToByteArray = async () => { - if (transformed) { - throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + if (hashAlgorithm !== `sha${length}` || mgf1HashAlgorithm !== hashAlgorithm) { + throw new Error(`Invalid key for this operation, its RSA-PSS parameters do not meet the requirements of "alg" ${algorithm}.`); } - transformed = true; - return await (0, node_http_handler_1.streamCollector)(stream); - }; - return Object.assign(stream, { - transformToByteArray, - transformToString: async (encoding) => { - const buf = await transformToByteArray(); - if (encoding === undefined || Buffer.isEncoding(encoding)) { - return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); - } - else { - const decoder = new util_1.TextDecoder(encoding); - return decoder.decode(buf); - } - }, - transformToWebStream: () => { - if (transformed) { - throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); - } - if (stream.readableFlowing !== null) { - throw new Error("The stream has been consumed by other callbacks."); - } - if (typeof stream_1.Readable.toWeb !== "function") { - throw new Error("Readable.toWeb() is not supported. Please make sure you are using Node.js >= 17.0.0, or polyfill is available."); - } - transformed = true; - return stream_1.Readable.toWeb(stream); - }, - }); -}; -exports.sdkStreamMixin = sdkStreamMixin; + + if (saltLength !== undefined && saltLength > length >> 3) { + throw new Error(`Invalid key for this operation, its RSA-PSS parameter saltLength does not meet the requirements of "alg" ${algorithm}.`) + } + } + break; + } + } +} /***/ }), -/***/ 26174: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 92321: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var bufferEqual = __nccwpck_require__(9239); +var Buffer = (__nccwpck_require__(21867).Buffer); +var crypto = __nccwpck_require__(6113); +var formatEcdsa = __nccwpck_require__(11728); +var util = __nccwpck_require__(73837); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.escapeUriPath = void 0; -const escape_uri_1 = __nccwpck_require__(60010); -const escapeUriPath = (uri) => uri.split("/").map(escape_uri_1.escapeUri).join("/"); -exports.escapeUriPath = escapeUriPath; +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' 
+var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} -/***/ }), +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } -/***/ 60010: -/***/ ((__unused_webpack_module, exports) => { + if (typeof key === 'string') { + return; + } -"use strict"; + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.escapeUri = void 0; -const escapeUri = (uri) => encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode); -exports.escapeUri = escapeUri; -const hexEncode = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`; + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } -/***/ }), + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } -/***/ 54197: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; -"use strict"; +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(60010), exports); -tslib_1.__exportStar(__nccwpck_require__(26174), exports); + if (typeof key === 'string') { + return; + } + if (typeof key === 'object') { + return; + } -/***/ }), + throw typeError(MSG_INVALID_SIGNER_KEY); +}; -/***/ 45917: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } -"use strict"; + if (typeof key === 'string') { + return key; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromUtf8 = void 0; -const util_buffer_from_1 = __nccwpck_require__(31381); -const fromUtf8 = (input) => { - const buf = (0, util_buffer_from_1.fromString)(input, "utf8"); - return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); -}; -exports.fromUtf8 = fromUtf8; + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } -/***/ }), + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } -/***/ 41895: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} -"use strict"; +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(45917), exports); -tslib_1.__exportStar(__nccwpck_require__(95470), exports); -tslib_1.__exportStar(__nccwpck_require__(99960), exports); +function toBase64(base64url) { + base64url = base64url.toString(); + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } -/***/ 
}), + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} -/***/ 95470: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} -"use strict"; +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toUint8Array = void 0; -const fromUtf8_1 = __nccwpck_require__(45917); -const toUint8Array = (data) => { - if (typeof data === "string") { - return (0, fromUtf8_1.fromUtf8)(data); - } - if (ArrayBuffer.isView(data)) { - return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); - } - return new Uint8Array(data); -}; -exports.toUint8Array = toUint8Array; +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} -/***/ }), +function createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. 
+ var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} -/***/ 99960: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} -"use strict"; +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toUtf8 = void 0; -const util_buffer_from_1 = __nccwpck_require__(31381); -const toUtf8 = (input) => (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); -exports.toUtf8 = toUtf8; +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/i); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; /***/ }), -/***/ 76991: +/***/ 22597: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; +/*global exports*/ +var SignStream = __nccwpck_require__(35070); +var VerifyStream = __nccwpck_require__(63974); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createWaiter = void 0; -const poller_1 = 
__nccwpck_require__(39033); -const utils_1 = __nccwpck_require__(26000); -const waiter_1 = __nccwpck_require__(79089); -const abortTimeout = async (abortSignal) => { - return new Promise((resolve) => { - abortSignal.onabort = () => resolve({ state: waiter_1.WaiterState.ABORTED }); - }); +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; +exports.createSign = function createSign(opts) { + return new SignStream(opts); }; -const createWaiter = async (options, input, acceptorChecks) => { - const params = { - ...waiter_1.waiterServiceDefaults, - ...options, - }; - (0, utils_1.validateWaiterOptions)(params); - const exitConditions = [(0, poller_1.runPolling)(params, input, acceptorChecks)]; - if (options.abortController) { - exitConditions.push(abortTimeout(options.abortController.signal)); - } - if (options.abortSignal) { - exitConditions.push(abortTimeout(options.abortSignal)); - } - return Promise.race(exitConditions); +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); }; -exports.createWaiter = createWaiter; /***/ }), -/***/ 78011: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 60704: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +/*global module, process*/ +var Buffer = (__nccwpck_require__(21867).Buffer); +var Stream = __nccwpck_require__(12781); +var util = __nccwpck_require__(73837); -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(76991), exports); -tslib_1.__exportStar(__nccwpck_require__(79089), exports); +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } -/***/ }), + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } -/***/ 39033: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } -"use strict"; + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.runPolling = void 0; -const sleep_1 = __nccwpck_require__(62380); -const waiter_1 = __nccwpck_require__(79089); -const exponentialBackoffWithJitter = (minDelay, maxDelay, attemptCeiling, attempt) => { - if (attempt > attemptCeiling) - return maxDelay; - const delay = minDelay * 2 ** (attempt - 1); - return randomInRange(minDelay, delay); +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); }; -const randomInRange = (min, max) => min + Math.random() * (max - min); -const runPolling = async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { - var _a; - const { state, reason } = await 
acceptorChecks(client, input); - if (state !== waiter_1.WaiterState.RETRY) { - return { state, reason }; - } - let currentAttempt = 1; - const waitUntil = Date.now() + maxWaitTime * 1000; - const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; - while (true) { - if (((_a = abortController === null || abortController === void 0 ? void 0 : abortController.signal) === null || _a === void 0 ? void 0 : _a.aborted) || (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted)) { - return { state: waiter_1.WaiterState.ABORTED }; - } - const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); - if (Date.now() + delay * 1000 > waitUntil) { - return { state: waiter_1.WaiterState.TIMEOUT }; - } - await (0, sleep_1.sleep)(delay); - const { state, reason } = await acceptorChecks(client, input); - if (state !== waiter_1.WaiterState.RETRY) { - return { state, reason }; - } - currentAttempt += 1; - } + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; }; -exports.runPolling = runPolling; + +module.exports = DataStream; /***/ }), -/***/ 26000: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 35070: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +/*global module*/ +var Buffer = (__nccwpck_require__(21867).Buffer); +var DataStream = __nccwpck_require__(60704); +var jwa = __nccwpck_require__(92321); +var Stream = __nccwpck_require__(12781); +var toString = __nccwpck_require__(56206); +var util = __nccwpck_require__(73837); -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tslib_1 = __nccwpck_require__(4351); -tslib_1.__exportStar(__nccwpck_require__(62380), exports); -tslib_1.__exportStar(__nccwpck_require__(6594), exports); +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} -/***/ }), +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} -/***/ 62380: -/***/ ((__unused_webpack_module, exports) => { +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); -"use strict"; + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.sleep = void 0; -const sleep = 
(seconds) => { - return new Promise((resolve) => setTimeout(resolve, seconds * 1000)); +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } }; -exports.sleep = sleep; - - -/***/ }), - -/***/ 6594: -/***/ ((__unused_webpack_module, exports) => { -"use strict"; +SignStream.sign = jwsSign; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validateWaiterOptions = void 0; -const validateWaiterOptions = (options) => { - if (options.maxWaitTime < 1) { - throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); - } - else if (options.minDelay < 1) { - throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); - } - else if (options.maxDelay < 1) { - throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); - } - else if (options.maxWaitTime <= options.minDelay) { - throw new Error(`WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); - } - else if (options.maxDelay < options.minDelay) { - throw new Error(`WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); - } -}; -exports.validateWaiterOptions = validateWaiterOptions; +module.exports = SignStream; /***/ }), -/***/ 79089: -/***/ ((__unused_webpack_module, exports) => { +/***/ 56206: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +/*global module*/ +var Buffer = (__nccwpck_require__(14300).Buffer); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkExceptions = exports.WaiterState = exports.waiterServiceDefaults = void 0; -exports.waiterServiceDefaults = { - minDelay: 2, - maxDelay: 120, -}; -var WaiterState; -(function (WaiterState) { - WaiterState["ABORTED"] = "ABORTED"; - WaiterState["FAILURE"] = "FAILURE"; - WaiterState["SUCCESS"] = "SUCCESS"; - WaiterState["RETRY"] = "RETRY"; - WaiterState["TIMEOUT"] = "TIMEOUT"; -})(WaiterState = exports.WaiterState || (exports.WaiterState = {})); -const checkExceptions = (result) => { - if (result.state === WaiterState.ABORTED) { - const abortError = new Error(`${JSON.stringify({ - ...result, - reason: "Request was aborted", - })}`); - abortError.name = "AbortError"; - throw abortError; - } - else if (result.state === WaiterState.TIMEOUT) { - const timeoutError = new Error(`${JSON.stringify({ - ...result, - reason: "Waiter has timed out", - })}`); - timeoutError.name = "TimeoutError"; - throw timeoutError; - } - else if (result.state !== WaiterState.SUCCESS) { - throw new Error(`${JSON.stringify({ result })}`); - } - return result; +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); }; -exports.checkExceptions = checkExceptions; /***/ }), -/***/ 81040: -/***/ ((__unused_webpack_module, exports) => { +/***/ 63974: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +/*global module*/ +var Buffer = (__nccwpck_require__(21867).Buffer); +var DataStream = 
__nccwpck_require__(60704); +var jwa = __nccwpck_require__(92321); +var Stream = __nccwpck_require__(12781); +var toString = __nccwpck_require__(56206); +var util = __nccwpck_require__(73837); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; -Object.defineProperty(exports, "__esModule", ({ value: true })); -function once(emitter, name, { signal } = {}) { - return new Promise((resolve, reject) => { - function cleanup() { - signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); - emitter.removeListener(name, onEvent); - emitter.removeListener('error', onError); - } - function onEvent(...args) { - cleanup(); - resolve(args); - } - function onError(err) { - cleanup(); - reject(err); - } - signal === null || signal === void 0 ? void 0 : signal.addEventListener('abort', cleanup); - emitter.on(name, onEvent); - emitter.on('error', onError); - }); +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; } -exports["default"] = once; -//# sourceMappingURL=index.js.map -/***/ }), +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} -/***/ 49690: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} -"use strict"; +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const events_1 = __nccwpck_require__(82361); -const debug_1 = __importDefault(__nccwpck_require__(38237)); -const promisify_1 = __importDefault(__nccwpck_require__(66570)); -const debug = debug_1.default('agent-base'); -function isAgent(v) { - return Boolean(v) && typeof v.addRequest === 'function'; +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; } -function isSecureEndpoint() { - const { stack } = new Error(); - if (typeof stack !== 'string') - return false; - return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); } -function createAgent(callback, opts) { - return new createAgent.Agent(callback, opts); + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); } -(function (createAgent) { - /** - * Base `http.Agent` implementation. - * No pooling/keep-alive is implemented by default. 
- * - * @param {Function} callback - * @api public - */ - class Agent extends events_1.EventEmitter { - constructor(callback, _opts) { - super(); - let opts = _opts; - if (typeof callback === 'function') { - this.callback = callback; - } - else if (callback) { - opts = callback; - } - // Timeout for the socket to be returned from the callback - this.timeout = null; - if (opts && typeof opts.timeout === 'number') { - this.timeout = opts.timeout; - } - // These aren't actually used by `agent-base`, but are required - // for the TypeScript definition files in `@types/node` :/ - this.maxFreeSockets = 1; - this.maxSockets = 1; - this.maxTotalSockets = Infinity; - this.sockets = {}; - this.freeSockets = {}; - this.requests = {}; - this.options = {}; - } - get defaultPort() { - if (typeof this.explicitDefaultPort === 'number') { - return this.explicitDefaultPort; - } - return isSecureEndpoint() ? 443 : 80; - } - set defaultPort(v) { - this.explicitDefaultPort = v; - } - get protocol() { - if (typeof this.explicitProtocol === 'string') { - return this.explicitProtocol; - } - return isSecureEndpoint() ? 'https:' : 'http:'; - } - set protocol(v) { - this.explicitProtocol = v; - } - callback(req, opts, fn) { - throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); - } - /** - * Called by node-core's "_http_client.js" module when creating - * a new HTTP request with this Agent instance. - * - * @api public - */ - addRequest(req, _opts) { - const opts = Object.assign({}, _opts); - if (typeof opts.secureEndpoint !== 'boolean') { - opts.secureEndpoint = isSecureEndpoint(); - } - if (opts.host == null) { - opts.host = 'localhost'; - } - if (opts.port == null) { - opts.port = opts.secureEndpoint ? 443 : 80; - } - if (opts.protocol == null) { - opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; - } - if (opts.host && opts.path) { - // If both a `host` and `path` are specified then it's most - // likely the result of a `url.parse()` call... we need to - // remove the `path` portion so that `net.connect()` doesn't - // attempt to open that as a unix socket file. - delete opts.path; - } - delete opts.agent; - delete opts.hostname; - delete opts._defaultAgent; - delete opts.defaultPort; - delete opts.createConnection; - // Hint to use "Connection: close" - // XXX: non-documented `http` module API :( - req._last = true; - req.shouldKeepAlive = false; - let timedOut = false; - let timeoutId = null; - const timeoutMs = opts.timeout || this.timeout; - const onerror = (err) => { - if (req._hadError) - return; - req.emit('error', err); - // For Safety. Some additional errors might fire later on - // and we need to make sure we don't double-fire the error event. 
- req._hadError = true; - }; - const ontimeout = () => { - timeoutId = null; - timedOut = true; - const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); - err.code = 'ETIMEOUT'; - onerror(err); - }; - const callbackError = (err) => { - if (timedOut) - return; - if (timeoutId !== null) { - clearTimeout(timeoutId); - timeoutId = null; - } - onerror(err); - }; - const onsocket = (socket) => { - if (timedOut) - return; - if (timeoutId != null) { - clearTimeout(timeoutId); - timeoutId = null; - } - if (isAgent(socket)) { - // `socket` is actually an `http.Agent` instance, so - // relinquish responsibility for this `req` to the Agent - // from here on - debug('Callback returned another Agent instance %o', socket.constructor.name); - socket.addRequest(req, opts); - return; - } - if (socket) { - socket.once('free', () => { - this.freeSocket(socket, opts); - }); - req.onSocket(socket); - return; - } - const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); - onerror(err); - }; - if (typeof this.callback !== 'function') { - onerror(new Error('`callback` is not defined')); - return; - } - if (!this.promisifiedCallback) { - if (this.callback.length >= 3) { - debug('Converting legacy callback function to promise'); - this.promisifiedCallback = promisify_1.default(this.callback); - } - else { - this.promisifiedCallback = this.callback; - } - } - if (typeof timeoutMs === 'number' && timeoutMs > 0) { - timeoutId = setTimeout(ontimeout, timeoutMs); - } - if ('port' in opts && typeof opts.port !== 'number') { - opts.port = Number(opts.port); - } - try { - debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); - Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); - } - catch (err) { - Promise.reject(err).catch(callbackError); - } - } - freeSocket(socket, opts) { - debug('Freeing socket %o %o', socket.constructor.name, opts); - socket.destroy(); - } - destroy() { - debug('Destroying agent %o', this.constructor.name); - } - } - createAgent.Agent = Agent; - // So that `instanceof` works correctly - createAgent.prototype = createAgent.Agent.prototype; -})(createAgent || (createAgent = {})); -module.exports = createAgent; -//# sourceMappingURL=index.js.map -/***/ }), +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} -/***/ 66570: -/***/ ((__unused_webpack_module, exports) => { +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); -"use strict"; + if (!isValidJws(jwsSig)) + return null; -Object.defineProperty(exports, "__esModule", ({ value: true })); -function promisify(fn) { - return function (req, opts) { - return new Promise((resolve, reject) => { - fn.call(this, req, opts, (err, rtn) => { - if (err) { - reject(err); - } - else { - resolve(rtn); - } - }); - }); - }; -} -exports["default"] = promisify; -//# sourceMappingURL=promisify.js.map + var header = headerFromJWS(jwsSig); -/***/ }), + if (!header) + return null; -/***/ 14812: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) 
+ payload = JSON.parse(payload, opts.encoding); -module.exports = -{ - parallel : __nccwpck_require__(8210), - serial : __nccwpck_require__(50445), - serialOrdered : __nccwpck_require__(3578) + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } }; +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; -/***/ }), +module.exports = VerifyStream; -/***/ 1700: -/***/ ((module) => { -// API -module.exports = abort; +/***/ }), -/** - * Aborts leftover active jobs - * - * @param {object} state - current state object - */ -function abort(state) -{ - Object.keys(state.jobs).forEach(clean.bind(state)); +/***/ 82022: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const timespan = __nccwpck_require__(20910); +const PS_SUPPORTED = __nccwpck_require__(59085); +const validateAsymmetricKey = __nccwpck_require__(47596); +const jws = __nccwpck_require__(22597); +const {includes, isBoolean, isInteger, isNumber, isPlainObject, isString, once} = __nccwpck_require__(90250) +const { KeyObject, createSecretKey, createPrivateKey } = __nccwpck_require__(6113) - // reset leftover jobs - state.jobs = {}; +const SUPPORTED_ALGS = ['RS256', 'RS384', 'RS512', 'ES256', 'ES384', 'ES512', 'HS256', 'HS384', 'HS512', 'none']; +if (PS_SUPPORTED) { + SUPPORTED_ALGS.splice(3, 0, 'PS256', 'PS384', 'PS512'); } -/** - * Cleans up leftover job by invoking abort function for the provided job id - * - * @this state - * @param {string|number} key - job id to abort - */ -function clean(key) -{ - if (typeof this.jobs[key] == 'function') - { - this.jobs[key](); +const sign_options_schema = { + expiresIn: { isValid: function(value) { return isInteger(value) || (isString(value) && value); }, message: '"expiresIn" should be a number of seconds or string representing a timespan' }, + notBefore: { isValid: function(value) { return isInteger(value) || (isString(value) && value); }, message: '"notBefore" should be a number of seconds or string representing a timespan' }, + audience: { isValid: function(value) { return isString(value) || Array.isArray(value); }, message: '"audience" must be a string or array' }, + algorithm: { isValid: includes.bind(null, SUPPORTED_ALGS), message: '"algorithm" must be a valid string enum value' }, + header: { isValid: isPlainObject, message: '"header" must be an object' }, + encoding: { isValid: isString, message: '"encoding" must be a string' }, 
+ issuer: { isValid: isString, message: '"issuer" must be a string' }, + subject: { isValid: isString, message: '"subject" must be a string' }, + jwtid: { isValid: isString, message: '"jwtid" must be a string' }, + noTimestamp: { isValid: isBoolean, message: '"noTimestamp" must be a boolean' }, + keyid: { isValid: isString, message: '"keyid" must be a string' }, + mutatePayload: { isValid: isBoolean, message: '"mutatePayload" must be a boolean' }, + allowInsecureKeySizes: { isValid: isBoolean, message: '"allowInsecureKeySizes" must be a boolean'}, + allowInvalidAsymmetricKeyTypes: { isValid: isBoolean, message: '"allowInvalidAsymmetricKeyTypes" must be a boolean'} +}; + +const registered_claims_schema = { + iat: { isValid: isNumber, message: '"iat" should be a number of seconds' }, + exp: { isValid: isNumber, message: '"exp" should be a number of seconds' }, + nbf: { isValid: isNumber, message: '"nbf" should be a number of seconds' } +}; + +function validate(schema, allowUnknown, object, parameterName) { + if (!isPlainObject(object)) { + throw new Error('Expected "' + parameterName + '" to be a plain object.'); } + Object.keys(object) + .forEach(function(key) { + const validator = schema[key]; + if (!validator) { + if (!allowUnknown) { + throw new Error('"' + key + '" is not allowed in "' + parameterName + '"'); + } + return; + } + if (!validator.isValid(object[key])) { + throw new Error(validator.message); + } + }); } +function validateOptions(options) { + return validate(sign_options_schema, false, options, 'options'); +} -/***/ }), +function validatePayload(payload) { + return validate(registered_claims_schema, true, payload, 'payload'); +} -/***/ 72794: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +const options_to_payload = { + 'audience': 'aud', + 'issuer': 'iss', + 'subject': 'sub', + 'jwtid': 'jti' +}; -var defer = __nccwpck_require__(15295); +const options_for_objects = [ + 'expiresIn', + 'notBefore', + 'noTimestamp', + 'audience', + 'issuer', + 'subject', + 'jwtid', +]; -// API -module.exports = async; +module.exports = function (payload, secretOrPrivateKey, options, callback) { + if (typeof options === 'function') { + callback = options; + options = {}; + } else { + options = options || {}; + } -/** - * Runs provided callback asynchronously - * even if callback itself is not - * - * @param {function} callback - callback to invoke - * @returns {function} - augmented callback - */ -function async(callback) -{ - var isAsync = false; + const isObjectPayload = typeof payload === 'object' && + !Buffer.isBuffer(payload); - // check if async happened - defer(function() { isAsync = true; }); + const header = Object.assign({ + alg: options.algorithm || 'HS256', + typ: isObjectPayload ? 'JWT' : undefined, + kid: options.keyid + }, options.header); - return function async_callback(err, result) - { - if (isAsync) - { - callback(err, result); - } - else - { - defer(function nextTick_callback() - { - callback(err, result); - }); + function failure(err) { + if (callback) { + return callback(err); } - }; -} - + throw err; + } -/***/ }), + if (!secretOrPrivateKey && options.algorithm !== 'none') { + return failure(new Error('secretOrPrivateKey must have a value')); + } -/***/ 15295: -/***/ ((module) => { + if (secretOrPrivateKey != null && !(secretOrPrivateKey instanceof KeyObject)) { + try { + secretOrPrivateKey = createPrivateKey(secretOrPrivateKey) + } catch (_) { + try { + secretOrPrivateKey = createSecretKey(typeof secretOrPrivateKey === 'string' ? 
Buffer.from(secretOrPrivateKey) : secretOrPrivateKey) + } catch (_) { + return failure(new Error('secretOrPrivateKey is not valid key material')); + } + } + } -module.exports = defer; + if (header.alg.startsWith('HS') && secretOrPrivateKey.type !== 'secret') { + return failure(new Error((`secretOrPrivateKey must be a symmetric key when using ${header.alg}`))) + } else if (/^(?:RS|PS|ES)/.test(header.alg)) { + if (secretOrPrivateKey.type !== 'private') { + return failure(new Error((`secretOrPrivateKey must be an asymmetric key when using ${header.alg}`))) + } + if (!options.allowInsecureKeySizes && + !header.alg.startsWith('ES') && + secretOrPrivateKey.asymmetricKeyDetails !== undefined && //KeyObject.asymmetricKeyDetails is supported in Node 15+ + secretOrPrivateKey.asymmetricKeyDetails.modulusLength < 2048) { + return failure(new Error(`secretOrPrivateKey has a minimum key size of 2048 bits for ${header.alg}`)); + } + } -/** - * Runs provided function on next iteration of the event loop - * - * @param {function} fn - function to run - */ -function defer(fn) -{ - var nextTick = typeof setImmediate == 'function' - ? setImmediate - : ( - typeof process == 'object' && typeof process.nextTick == 'function' - ? process.nextTick - : null - ); + if (typeof payload === 'undefined') { + return failure(new Error('payload is required')); + } else if (isObjectPayload) { + try { + validatePayload(payload); + } + catch (error) { + return failure(error); + } + if (!options.mutatePayload) { + payload = Object.assign({},payload); + } + } else { + const invalid_options = options_for_objects.filter(function (opt) { + return typeof options[opt] !== 'undefined'; + }); - if (nextTick) - { - nextTick(fn); - } - else - { - setTimeout(fn, 0); + if (invalid_options.length > 0) { + return failure(new Error('invalid ' + invalid_options.join(',') + ' option for ' + (typeof payload ) + ' payload')); + } } -} + if (typeof payload.exp !== 'undefined' && typeof options.expiresIn !== 'undefined') { + return failure(new Error('Bad "options.expiresIn" option the payload already has an "exp" property.')); + } -/***/ }), + if (typeof payload.nbf !== 'undefined' && typeof options.notBefore !== 'undefined') { + return failure(new Error('Bad "options.notBefore" option the payload already has an "nbf" property.')); + } -/***/ 9023: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + try { + validateOptions(options); + } + catch (error) { + return failure(error); + } -var async = __nccwpck_require__(72794) - , abort = __nccwpck_require__(1700) - ; + if (!options.allowInvalidAsymmetricKeyTypes) { + try { + validateAsymmetricKey(header.alg, secretOrPrivateKey); + } catch (error) { + return failure(error); + } + } -// API -module.exports = iterate; + const timestamp = payload.iat || Math.floor(Date.now() / 1000); -/** - * Iterates over each job object - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {object} state - current job status - * @param {function} callback - invoked when all elements processed - */ -function iterate(list, iterator, state, callback) -{ - // store current index - var key = state['keyedList'] ? 
state['keyedList'][state.index] : state.index; + if (options.noTimestamp) { + delete payload.iat; + } else if (isObjectPayload) { + payload.iat = timestamp; + } - state.jobs[key] = runJob(iterator, key, list[key], function(error, output) - { - // don't repeat yourself - // skip secondary callbacks - if (!(key in state.jobs)) - { - return; + if (typeof options.notBefore !== 'undefined') { + try { + payload.nbf = timespan(options.notBefore, timestamp); } + catch (err) { + return failure(err); + } + if (typeof payload.nbf === 'undefined') { + return failure(new Error('"notBefore" should be a number of seconds or string representing a timespan eg: "1d", "20h", 60')); + } + } - // clean up jobs - delete state.jobs[key]; - - if (error) - { - // don't process rest of the results - // stop still active jobs - // and reset the list - abort(state); + if (typeof options.expiresIn !== 'undefined' && typeof payload === 'object') { + try { + payload.exp = timespan(options.expiresIn, timestamp); } - else - { - state.results[key] = output; + catch (err) { + return failure(err); + } + if (typeof payload.exp === 'undefined') { + return failure(new Error('"expiresIn" should be a number of seconds or string representing a timespan eg: "1d", "20h", 60')); } + } - // return salvaged results - callback(error, state.results); + Object.keys(options_to_payload).forEach(function (key) { + const claim = options_to_payload[key]; + if (typeof options[key] !== 'undefined') { + if (typeof payload[claim] !== 'undefined') { + return failure(new Error('Bad "options.' + key + '" option. The payload already has an "' + claim + '" property.')); + } + payload[claim] = options[key]; + } }); -} -/** - * Runs iterator over provided job element - * - * @param {function} iterator - iterator to invoke - * @param {string|number} key - key/index of the element in the list of jobs - * @param {mixed} item - job description - * @param {function} callback - invoked after iterator is done with the job - * @returns {function|mixed} - job abort function or something else - */ -function runJob(iterator, key, item, callback) -{ - var aborter; + const encoding = options.encoding || 'utf8'; - // allow shortcut if iterator expects only two arguments - if (iterator.length == 2) - { - aborter = iterator(item, async(callback)); - } - // otherwise go with full three arguments - else - { - aborter = iterator(item, key, async(callback)); + if (typeof callback === 'function') { + callback = callback && once(callback); + + jws.createSign({ + header: header, + privateKey: secretOrPrivateKey, + payload: payload, + encoding: encoding + }).once('error', callback) + .once('done', function (signature) { + // TODO: Remove in favor of the modulus length check before signing once node 15+ is the minimum supported version + if(!options.allowInsecureKeySizes && /^(?:RS|PS)/.test(header.alg) && signature.length < 256) { + return callback(new Error(`secretOrPrivateKey has a minimum key size of 2048 bits for ${header.alg}`)) + } + callback(null, signature); + }); + } else { + let signature = jws.sign({header: header, payload: payload, secret: secretOrPrivateKey, encoding: encoding}); + // TODO: Remove in favor of the modulus length check before signing once node 15+ is the minimum supported version + if(!options.allowInsecureKeySizes && /^(?:RS|PS)/.test(header.alg) && signature.length < 256) { + throw new Error(`secretOrPrivateKey has a minimum key size of 2048 bits for ${header.alg}`) + } + return signature } +}; + - return aborter; +/***/ }), + +/***/ 12327: +/***/ 
((module, __unused_webpack_exports, __nccwpck_require__) => { + +const JsonWebTokenError = __nccwpck_require__(405); +const NotBeforeError = __nccwpck_require__(4383); +const TokenExpiredError = __nccwpck_require__(46637); +const decode = __nccwpck_require__(53359); +const timespan = __nccwpck_require__(20910); +const validateAsymmetricKey = __nccwpck_require__(47596); +const PS_SUPPORTED = __nccwpck_require__(59085); +const jws = __nccwpck_require__(22597); +const {KeyObject, createSecretKey, createPublicKey} = __nccwpck_require__(6113); + +const PUB_KEY_ALGS = ['RS256', 'RS384', 'RS512']; +const EC_KEY_ALGS = ['ES256', 'ES384', 'ES512']; +const RSA_KEY_ALGS = ['RS256', 'RS384', 'RS512']; +const HS_ALGS = ['HS256', 'HS384', 'HS512']; + +if (PS_SUPPORTED) { + PUB_KEY_ALGS.splice(PUB_KEY_ALGS.length, 0, 'PS256', 'PS384', 'PS512'); + RSA_KEY_ALGS.splice(RSA_KEY_ALGS.length, 0, 'PS256', 'PS384', 'PS512'); } +module.exports = function (jwtString, secretOrPublicKey, options, callback) { + if ((typeof options === 'function') && !callback) { + callback = options; + options = {}; + } -/***/ }), + if (!options) { + options = {}; + } -/***/ 42474: -/***/ ((module) => { + //clone this object since we are going to mutate it. + options = Object.assign({}, options); -// API -module.exports = state; + let done; -/** - * Creates initial state object - * for iteration over list - * - * @param {array|object} list - list to iterate over - * @param {function|null} sortMethod - function to use for keys sort, - * or `null` to keep them as is - * @returns {object} - initial state object - */ -function state(list, sortMethod) -{ - var isNamedList = !Array.isArray(list) - , initState = - { - index : 0, - keyedList: isNamedList || sortMethod ? Object.keys(list) : null, - jobs : {}, - results : isNamedList ? {} : [], - size : isNamedList ? Object.keys(list).length : list.length - } - ; + if (callback) { + done = callback; + } else { + done = function(err, data) { + if (err) throw err; + return data; + }; + } - if (sortMethod) - { - // sort array keys based on it's values - // sort object's keys just on own merit - initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b) - { - return sortMethod(list[a], list[b]); - }); + if (options.clockTimestamp && typeof options.clockTimestamp !== 'number') { + return done(new JsonWebTokenError('clockTimestamp must be a number')); } - return initState; -} + if (options.nonce !== undefined && (typeof options.nonce !== 'string' || options.nonce.trim() === '')) { + return done(new JsonWebTokenError('nonce must be a non-empty string')); + } + if (options.allowInvalidAsymmetricKeyTypes !== undefined && typeof options.allowInvalidAsymmetricKeyTypes !== 'boolean') { + return done(new JsonWebTokenError('allowInvalidAsymmetricKeyTypes must be a boolean')); + } -/***/ }), + const clockTimestamp = options.clockTimestamp || Math.floor(Date.now() / 1000); -/***/ 37942: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!jwtString){ + return done(new JsonWebTokenError('jwt must be provided')); + } -var abort = __nccwpck_require__(1700) - , async = __nccwpck_require__(72794) - ; + if (typeof jwtString !== 'string') { + return done(new JsonWebTokenError('jwt must be a string')); + } -// API -module.exports = terminator; + const parts = jwtString.split('.'); -/** - * Terminates jobs in the attached state context - * - * @this AsyncKitState# - * @param {function} callback - final callback to invoke after termination - */ -function terminator(callback) -{ - if (!Object.keys(this.jobs).length) - { - return; + if (parts.length !== 3){ + return done(new JsonWebTokenError('jwt malformed')); } - // fast forward iteration index - this.index = this.size; + let decodedToken; - // abort jobs - abort(this); + try { + decodedToken = decode(jwtString, { complete: true }); + } catch(err) { + return done(err); + } - // send back results we have so far - async(callback)(null, this.results); -} + if (!decodedToken) { + return done(new JsonWebTokenError('invalid token')); + } + const header = decodedToken.header; + let getSecret; -/***/ }), + if(typeof secretOrPublicKey === 'function') { + if(!callback) { + return done(new JsonWebTokenError('verify must be called asynchronous if secret or public key is provided as a callback')); + } -/***/ 8210: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + getSecret = secretOrPublicKey; + } + else { + getSecret = function(header, secretCallback) { + return secretCallback(null, secretOrPublicKey); + }; + } -var iterate = __nccwpck_require__(9023) - , initState = __nccwpck_require__(42474) - , terminator = __nccwpck_require__(37942) - ; + return getSecret(header, function(err, secretOrPublicKey) { + if(err) { + return done(new JsonWebTokenError('error in secret or public key callback: ' + err.message)); + } -// Public API -module.exports = parallel; + const hasSignature = parts[2].trim() !== ''; -/** - * Runs iterator over provided array elements in parallel - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function parallel(list, iterator, callback) -{ - var state = initState(list); + if (!hasSignature && secretOrPublicKey){ + return done(new JsonWebTokenError('jwt signature is required')); + } - while (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, function(error, result) - { - if (error) - { - callback(error, result); - return; - } + if (hasSignature && !secretOrPublicKey) { + return done(new 
JsonWebTokenError('secret or public key must be provided')); + } - // looks like it's the last one - if (Object.keys(state.jobs).length === 0) - { - callback(null, state.results); - return; + if (!hasSignature && !options.algorithms) { + return done(new JsonWebTokenError('please specify "none" in "algorithms" to verify unsigned tokens')); + } + + if (secretOrPublicKey != null && !(secretOrPublicKey instanceof KeyObject)) { + try { + secretOrPublicKey = createPublicKey(secretOrPublicKey); + } catch (_) { + try { + secretOrPublicKey = createSecretKey(typeof secretOrPublicKey === 'string' ? Buffer.from(secretOrPublicKey) : secretOrPublicKey); + } catch (_) { + return done(new JsonWebTokenError('secretOrPublicKey is not valid key material')) + } } - }); + } - state.index++; - } + if (!options.algorithms) { + if (secretOrPublicKey.type === 'secret') { + options.algorithms = HS_ALGS; + } else if (['rsa', 'rsa-pss'].includes(secretOrPublicKey.asymmetricKeyType)) { + options.algorithms = RSA_KEY_ALGS + } else if (secretOrPublicKey.asymmetricKeyType === 'ec') { + options.algorithms = EC_KEY_ALGS + } else { + options.algorithms = PUB_KEY_ALGS + } + } - return terminator.bind(state, callback); -} + if (options.algorithms.indexOf(decodedToken.header.alg) === -1) { + return done(new JsonWebTokenError('invalid algorithm')); + } + if (header.alg.startsWith('HS') && secretOrPublicKey.type !== 'secret') { + return done(new JsonWebTokenError((`secretOrPublicKey must be a symmetric key when using ${header.alg}`))) + } else if (/^(?:RS|PS|ES)/.test(header.alg) && secretOrPublicKey.type !== 'public') { + return done(new JsonWebTokenError((`secretOrPublicKey must be an asymmetric key when using ${header.alg}`))) + } -/***/ }), + if (!options.allowInvalidAsymmetricKeyTypes) { + try { + validateAsymmetricKey(header.alg, secretOrPublicKey); + } catch (e) { + return done(e); + } + } -/***/ 50445: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let valid; -var serialOrdered = __nccwpck_require__(3578); + try { + valid = jws.verify(jwtString, decodedToken.header.alg, secretOrPublicKey); + } catch (e) { + return done(e); + } -// Public API -module.exports = serial; + if (!valid) { + return done(new JsonWebTokenError('invalid signature')); + } -/** - * Runs iterator over provided array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serial(list, iterator, callback) -{ - return serialOrdered(list, iterator, null, callback); -} + const payload = decodedToken.payload; + if (typeof payload.nbf !== 'undefined' && !options.ignoreNotBefore) { + if (typeof payload.nbf !== 'number') { + return done(new JsonWebTokenError('invalid nbf value')); + } + if (payload.nbf > clockTimestamp + (options.clockTolerance || 0)) { + return done(new NotBeforeError('jwt not active', new Date(payload.nbf * 1000))); + } + } -/***/ }), + if (typeof payload.exp !== 'undefined' && !options.ignoreExpiration) { + if (typeof payload.exp !== 'number') { + return done(new JsonWebTokenError('invalid exp value')); + } + if (clockTimestamp >= payload.exp + (options.clockTolerance || 0)) { + return done(new TokenExpiredError('jwt expired', new Date(payload.exp * 1000))); + } + } -/***/ 3578: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (options.audience) { + const audiences = 
Array.isArray(options.audience) ? options.audience : [options.audience]; + const target = Array.isArray(payload.aud) ? payload.aud : [payload.aud]; -var iterate = __nccwpck_require__(9023) - , initState = __nccwpck_require__(42474) - , terminator = __nccwpck_require__(37942) - ; + const match = target.some(function (targetAudience) { + return audiences.some(function (audience) { + return audience instanceof RegExp ? audience.test(targetAudience) : audience === targetAudience; + }); + }); -// Public API -module.exports = serialOrdered; -// sorting helpers -module.exports.ascending = ascending; -module.exports.descending = descending; + if (!match) { + return done(new JsonWebTokenError('jwt audience invalid. expected: ' + audiences.join(' or '))); + } + } -/** - * Runs iterator over provided sorted array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serialOrdered(list, iterator, sortMethod, callback) -{ - var state = initState(list, sortMethod); + if (options.issuer) { + const invalid_issuer = + (typeof options.issuer === 'string' && payload.iss !== options.issuer) || + (Array.isArray(options.issuer) && options.issuer.indexOf(payload.iss) === -1); - iterate(list, iterator, state, function iteratorHandler(error, result) - { - if (error) - { - callback(error, result); - return; + if (invalid_issuer) { + return done(new JsonWebTokenError('jwt issuer invalid. expected: ' + options.issuer)); + } } - state.index++; + if (options.subject) { + if (payload.sub !== options.subject) { + return done(new JsonWebTokenError('jwt subject invalid. expected: ' + options.subject)); + } + } - // are we there yet? - if (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, iteratorHandler); - return; + if (options.jwtid) { + if (payload.jti !== options.jwtid) { + return done(new JsonWebTokenError('jwt jwtid invalid. expected: ' + options.jwtid)); + } } - // done here - callback(null, state.results); - }); + if (options.nonce) { + if (payload.nonce !== options.nonce) { + return done(new JsonWebTokenError('jwt nonce invalid. expected: ' + options.nonce)); + } + } - return terminator.bind(state, callback); -} + if (options.maxAge) { + if (typeof payload.iat !== 'number') { + return done(new JsonWebTokenError('iat required when maxAge is specified')); + } -/* - * -- Sort methods - */ + const maxAgeTimestamp = timespan(options.maxAge, payload.iat); + if (typeof maxAgeTimestamp === 'undefined') { + return done(new JsonWebTokenError('"maxAge" should be a number of seconds or string representing a timespan eg: "1d", "20h", 60')); + } + if (clockTimestamp >= maxAgeTimestamp + (options.clockTolerance || 0)) { + return done(new TokenExpiredError('maxAge exceeded', new Date(maxAgeTimestamp * 1000))); + } + } -/** - * sort helper to sort array elements in ascending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function ascending(a, b) -{ - return a < b ? -1 : a > b ? 
1 : 0; -} + if (options.complete === true) { + const signature = decodedToken.signature; -/** - * sort helper to sort array elements in descending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function descending(a, b) -{ - return -1 * ascending(a, b); -} + return done(null, { + header: header, + payload: payload, + signature: signature + }); + } + + return done(null, payload); + }); +}; /***/ }), -/***/ 83682: +/***/ 96010: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var register = __nccwpck_require__(44670); -var addHook = __nccwpck_require__(5549); -var removeHook = __nccwpck_require__(6819); - -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind; -var bindable = bind.bind(bind); - -function bindApi(hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply( - null, - name ? [state, name] : [state] - ); - hook.api = { remove: removeHookRef }; - hook.remove = removeHookRef; - ["before", "error", "after", "wrap"].forEach(function (kind) { - var args = name ? [state, kind, name] : [state, kind]; - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); - }); -} - -function HookSingular() { - var singularHookName = "h"; - var singularHookState = { - registry: {}, - }; - var singularHook = register.bind(null, singularHookState, singularHookName); - bindApi(singularHook, singularHookState, singularHookName); - return singularHook; -} - -function HookCollection() { - var state = { - registry: {}, - }; +var bufferEqual = __nccwpck_require__(9239); +var Buffer = (__nccwpck_require__(21867).Buffer); +var crypto = __nccwpck_require__(6113); +var formatEcdsa = __nccwpck_require__(11728); +var util = __nccwpck_require__(73837); - var hook = register.bind(null, state); - bindApi(hook, state); +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' +var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; - return hook; +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; } -var collectionHookDeprecationMessageDisplayed = false; -function Hook() { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn( - '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4' - ); - collectionHookDeprecationMessageDisplayed = true; +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; } - return HookCollection(); -} - -Hook.Singular = HookSingular.bind(); -Hook.Collection = HookCollection.bind(); -module.exports = Hook; -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook; -module.exports.Singular = Hook.Singular; -module.exports.Collection = Hook.Collection; - - -/***/ }), - -/***/ 5549: -/***/ ((module) => { + if (typeof key === 'string') { + return; + } -module.exports = addHook; + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } -function addHook(state, kind, name, hook) { - var orig = hook; - if (!state.registry[name]) { - state.registry[name] = []; + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); } - if (kind === "before") { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)); - }; + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); } - if (kind === "after") { - hook = function (method, options) { - var result; - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_; - return orig(result, options); - }) - .then(function () { - return result; - }); - }; + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); } - if (kind === "error") { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options); - }); - }; + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); } +}; - state.registry[name].push({ - hook: hook, - orig: orig, - }); -} +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + if (typeof key === 'string') { + return; + } -/***/ }), + if (typeof key === 'object') { + return; + } -/***/ 44670: -/***/ ((module) => { + throw typeError(MSG_INVALID_SIGNER_KEY); +}; -module.exports = register; +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } -function register(state, name, method, options) { - if (typeof method !== "function") { - throw new Error("method for before hook must be a function"); + if (typeof key === 'string') { + return key; } - if (!options) { - options = {}; + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); } - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options); - }, method)(); + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); } - return Promise.resolve().then(function () { - if (!state.registry[name]) { - return method(options); - } + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } - return state.registry[name].reduce(function (method, registered) { - return registered.hook.bind(null, method, options); - }, method)(); - }); + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } } +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} -/***/ }), - -/***/ 6819: -/***/ ((module) => { - -module.exports = removeHook; - -function removeHook(state, name, method) { - if (!state.registry[name]) { - return; - } - - var index = 
state.registry[name] - .map(function (registered) { - return registered.orig; - }) - .indexOf(method); +function toBase64(base64url) { + base64url = base64url.toString(); - if (index === -1) { - return; + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } } - state.registry[name].splice(index, 1); + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); } +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} -/***/ }), - -/***/ 9239: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} -"use strict"; -/*jshint node:true */ +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} -var Buffer = (__nccwpck_require__(14300).Buffer); // browserify -var SlowBuffer = (__nccwpck_require__(14300).SlowBuffer); +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} -module.exports = bufferEq; +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} -function bufferEq(a, b) { +function createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} - // shortcutting on type is necessary for correctness - if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { - return false; +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); } +} - // buffer sizes should be well-known information, so despite this - // shortcutting, it doesn't leak any information about the *contents* of the - // buffers. 
- if (a.length !== b.length) { - return false; +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); } +} - var c = 0; - for (var i = 0; i < a.length; i++) { - /*jshint bitwise:false */ - c |= a[i] ^ b[i]; // XOR +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); } - return c === 0; } -bufferEq.install = function() { - Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { - return bufferEq(this, that); +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; }; -}; - -var origBufEqual = Buffer.prototype.equal; -var origSlowBufEqual = SlowBuffer.prototype.equal; -bufferEq.restore = function() { - Buffer.prototype.equal = origBufEqual; - SlowBuffer.prototype.equal = origSlowBufEqual; -}; - - -/***/ }), - -/***/ 85443: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = __nccwpck_require__(73837); -var Stream = (__nccwpck_require__(12781).Stream); -var DelayedStream = __nccwpck_require__(18611); +} -module.exports = CombinedStream; -function CombinedStream() { - this.writable = false; - this.readable = true; - this.dataSize = 0; - this.maxDataSize = 2 * 1024 * 1024; - this.pauseStreams = true; +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} - this._released = false; - this._streams = []; - this._currentStream = null; - this._insideLoop = false; - this._pendingNext = false; +function createNoneSigner() { + return function sign() { + return ''; + } } -util.inherits(CombinedStream, Stream); -CombinedStream.create = function(options) { - var combinedStream = new this(); +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} - options = options || {}; - for (var option in options) { - combinedStream[option] = options[option]; +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; - return combinedStream; + return { + sign: 
signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } }; -CombinedStream.isStreamLike = function(stream) { - return (typeof stream !== 'function') - && (typeof stream !== 'string') - && (typeof stream !== 'boolean') - && (typeof stream !== 'number') - && (!Buffer.isBuffer(stream)); -}; -CombinedStream.prototype.append = function(stream) { - var isStreamLike = CombinedStream.isStreamLike(stream); +/***/ }), - if (isStreamLike) { - if (!(stream instanceof DelayedStream)) { - var newStream = DelayedStream.create(stream, { - maxDataSize: Infinity, - pauseStream: this.pauseStreams, - }); - stream.on('data', this._checkDataSize.bind(this)); - stream = newStream; - } +/***/ 4636: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this._handleErrors(stream); +/*global exports*/ +var SignStream = __nccwpck_require__(73334); +var VerifyStream = __nccwpck_require__(5522); - if (this.pauseStreams) { - stream.pause(); - } - } +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; - this._streams.push(stream); - return this; +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; +exports.createSign = function createSign(opts) { + return new SignStream(opts); }; - -CombinedStream.prototype.pipe = function(dest, options) { - Stream.prototype.pipe.call(this, dest, options); - this.resume(); - return dest; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); }; -CombinedStream.prototype._getNext = function() { - this._currentStream = null; - if (this._insideLoop) { - this._pendingNext = true; - return; // defer call - } +/***/ }), - this._insideLoop = true; - try { - do { - this._pendingNext = false; - this._realGetNext(); - } while (this._pendingNext); - } finally { - this._insideLoop = false; - } -}; +/***/ 61868: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -CombinedStream.prototype._realGetNext = function() { - var stream = this._streams.shift(); +/*global module, process*/ +var Buffer = (__nccwpck_require__(21867).Buffer); +var Stream = __nccwpck_require__(12781); +var util = __nccwpck_require__(73837); +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; - if (typeof stream == 'undefined') { - this.end(); - return; + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; } - if (typeof stream !== 'function') { - this._pipeNext(stream); - return; + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; } - var getStream = stream; - getStream(function(stream) { - var isStreamLike = CombinedStream.isStreamLike(stream); - if (isStreamLike) { - stream.on('data', this._checkDataSize.bind(this)); - this._handleErrors(stream); - } - - this._pipeNext(stream); - }.bind(this)); -}; - -CombinedStream.prototype._pipeNext = function(stream) { - this._currentStream = stream; - - var isStreamLike = CombinedStream.isStreamLike(stream); - if (isStreamLike) { - stream.on('end', this._getNext.bind(this)); - stream.pipe(this, {end: false}); - return; + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = 
false; + this.emit('close'); + }.bind(this)); + return this; } - var value = stream; - this.write(value); - this._getNext(); -}; - -CombinedStream.prototype._handleErrors = function(stream) { - var self = this; - stream.on('error', function(err) { - self._emitError(err); - }); -}; + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); -CombinedStream.prototype.write = function(data) { +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); this.emit('data', data); }; -CombinedStream.prototype.pause = function() { - if (!this.pauseStreams) { - return; - } - - if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); - this.emit('pause'); +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; }; -CombinedStream.prototype.resume = function() { - if (!this._released) { - this._released = true; - this.writable = true; - this._getNext(); - } +module.exports = DataStream; - if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); - this.emit('resume'); -}; -CombinedStream.prototype.end = function() { - this._reset(); - this.emit('end'); -}; +/***/ }), -CombinedStream.prototype.destroy = function() { - this._reset(); - this.emit('close'); -}; +/***/ 73334: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -CombinedStream.prototype._reset = function() { - this.writable = false; - this._streams = []; - this._currentStream = null; -}; +/*global module*/ +var Buffer = (__nccwpck_require__(21867).Buffer); +var DataStream = __nccwpck_require__(61868); +var jwa = __nccwpck_require__(96010); +var Stream = __nccwpck_require__(12781); +var toString = __nccwpck_require__(65292); +var util = __nccwpck_require__(73837); -CombinedStream.prototype._checkDataSize = function() { - this._updateDataSize(); - if (this.dataSize <= this.maxDataSize) { - return; - } +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} - var message = - 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; - this._emitError(new Error(message)); -}; +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} -CombinedStream.prototype._updateDataSize = function() { - this.dataSize = 0; +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} - var self = this; - this._streams.forEach(function(stream) { - if (!stream.dataSize) { - return; - } +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = 
this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); - self.dataSize += stream.dataSize; - }); + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); - if (this._currentStream && this._currentStream.dataSize) { - this.dataSize += this._currentStream.dataSize; +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); } }; -CombinedStream.prototype._emitError = function(err) { - this._reset(); - this.emit('error', err); -}; - +SignStream.sign = jwsSign; -/***/ }), +module.exports = SignStream; -/***/ 84697: -/***/ ((module) => { -/** - * Helpers. - */ +/***/ }), -var s = 1000; -var m = s * 60; -var h = m * 60; -var d = h * 24; -var w = d * 7; -var y = d * 365.25; +/***/ 65292: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Parse or format the given `val`. - * - * Options: - * - * - `long` verbose formatting [false] - * - * @param {String|Number} val - * @param {Object} [options] - * @throws {Error} throw an error if val is not a non-empty string or a number - * @return {String|Number} - * @api public - */ +/*global module*/ +var Buffer = (__nccwpck_require__(14300).Buffer); -module.exports = function(val, options) { - options = options || {}; - var type = typeof val; - if (type === 'string' && val.length > 0) { - return parse(val); - } else if (type === 'number' && isFinite(val)) { - return options.long ? fmtLong(val) : fmtShort(val); - } - throw new Error( - 'val is not a non-empty string or a valid number. val=' + - JSON.stringify(val) - ); +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); }; -/** - * Parse the given `str` and return milliseconds. - * - * @param {String} str - * @return {Number} - * @api private - */ -function parse(str) { - str = String(str); - if (str.length > 100) { - return; - } - var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( - str - ); - if (!match) { - return; - } - var n = parseFloat(match[1]); - var type = (match[2] || 'ms').toLowerCase(); - switch (type) { - case 'years': - case 'year': - case 'yrs': - case 'yr': - case 'y': - return n * y; - case 'weeks': - case 'week': - case 'w': - return n * w; - case 'days': - case 'day': - case 'd': - return n * d; - case 'hours': - case 'hour': - case 'hrs': - case 'hr': - case 'h': - return n * h; - case 'minutes': - case 'minute': - case 'mins': - case 'min': - case 'm': - return n * m; - case 'seconds': - case 'second': - case 'secs': - case 'sec': - case 's': - return n * s; - case 'milliseconds': - case 'millisecond': - case 'msecs': - case 'msec': - case 'ms': - return n; - default: - return undefined; - } -} +/***/ }), -/** - * Short format for `ms`. 
- * - * @param {Number} ms - * @return {String} - * @api private - */ +/***/ 5522: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function fmtShort(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return Math.round(ms / d) + 'd'; - } - if (msAbs >= h) { - return Math.round(ms / h) + 'h'; - } - if (msAbs >= m) { - return Math.round(ms / m) + 'm'; - } - if (msAbs >= s) { - return Math.round(ms / s) + 's'; - } - return ms + 'ms'; +/*global module*/ +var Buffer = (__nccwpck_require__(21867).Buffer); +var DataStream = __nccwpck_require__(61868); +var jwa = __nccwpck_require__(96010); +var Stream = __nccwpck_require__(12781); +var toString = __nccwpck_require__(65292); +var util = __nccwpck_require__(73837); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; } -/** - * Long format for `ms`. - * - * @param {Number} ms - * @return {String} - * @api private - */ +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} -function fmtLong(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return plural(ms, msAbs, d, 'day'); - } - if (msAbs >= h) { - return plural(ms, msAbs, h, 'hour'); - } - if (msAbs >= m) { - return plural(ms, msAbs, m, 'minute'); - } - if (msAbs >= s) { - return plural(ms, msAbs, s, 'second'); - } - return ms + ' ms'; +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); } -/** - * Pluralization helper. - */ +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} -function plural(ms, msAbs, n, name) { - var isPlural = msAbs >= n * 1.5; - return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; } +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} -/***/ }), +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} -/***/ 28222: -/***/ ((module, exports, __nccwpck_require__) => { +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} -/* eslint-env browser */ +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); -/** - * This is the web browser implementation of `debug()`. - */ + if (!isValidJws(jwsSig)) + return null; -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.storage = localstorage(); -exports.destroy = (() => { - let warned = false; + var header = headerFromJWS(jwsSig); - return () => { - if (!warned) { - warned = true; - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } - }; -})(); + if (!header) + return null; -/** - * Colors. 
- */ + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); -exports.colors = [ - '#0000CC', - '#0000FF', - '#0033CC', - '#0033FF', - '#0066CC', - '#0066FF', - '#0099CC', - '#0099FF', - '#00CC00', - '#00CC33', - '#00CC66', - '#00CC99', - '#00CCCC', - '#00CCFF', - '#3300CC', - '#3300FF', - '#3333CC', - '#3333FF', - '#3366CC', - '#3366FF', - '#3399CC', - '#3399FF', - '#33CC00', - '#33CC33', - '#33CC66', - '#33CC99', - '#33CCCC', - '#33CCFF', - '#6600CC', - '#6600FF', - '#6633CC', - '#6633FF', - '#66CC00', - '#66CC33', - '#9900CC', - '#9900FF', - '#9933CC', - '#9933FF', - '#99CC00', - '#99CC33', - '#CC0000', - '#CC0033', - '#CC0066', - '#CC0099', - '#CC00CC', - '#CC00FF', - '#CC3300', - '#CC3333', - '#CC3366', - '#CC3399', - '#CC33CC', - '#CC33FF', - '#CC6600', - '#CC6633', - '#CC9900', - '#CC9933', - '#CCCC00', - '#CCCC33', - '#FF0000', - '#FF0033', - '#FF0066', - '#FF0099', - '#FF00CC', - '#FF00FF', - '#FF3300', - '#FF3333', - '#FF3366', - '#FF3399', - '#FF33CC', - '#FF33FF', - '#FF6600', - '#FF6633', - '#FF9900', - '#FF9933', - '#FFCC00', - '#FFCC33' -]; + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} -/** - * Currently only WebKit-based Web Inspectors, Firefox >= v31, - * and the Firebug extension (any Firefox version) are known - * to support "%c" CSS customizations. - * - * TODO: add a `localStorage` variable to explicitly enable/disable colors - */ +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); -// eslint-disable-next-line complexity -function useColors() { - // NB: In an Electron preload script, document will be defined but not fully - // initialized. Since we know we're in Chrome, we'll just detect this case - // explicitly - if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { - return true; - } + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; - // Internet Explorer and Edge do not support colors. - if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { - return false; - } +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; - // Is webkit? 
http://stackoverflow.com/a/16459606/376773 - // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 - return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || - // Is firebug? http://stackoverflow.com/a/398120/376773 - (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || - // Is firefox >= v31? - // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || - // Double check webkit in userAgent just in case we are in a worker - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); -} +module.exports = VerifyStream; -/** - * Colorize log arguments if enabled. - * - * @api public - */ -function formatArgs(args) { - args[0] = (this.useColors ? '%c' : '') + - this.namespace + - (this.useColors ? ' %c' : ' ') + - args[0] + - (this.useColors ? '%c ' : ' ') + - '+' + module.exports.humanize(this.diff); +/***/ }), - if (!this.useColors) { - return; - } +/***/ 7994: +/***/ ((module) => { - const c = 'color: ' + this.color; - args.splice(1, 0, c, 'color: inherit'); +/** + * lodash (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright jQuery Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ - // The final "%c" is somewhat tricky, because there could be other - // arguments passed either before or after the %c, so we need to - // figure out the correct index to insert the CSS into - let index = 0; - let lastC = 0; - args[0].replace(/%[a-zA-Z%]/g, match => { - if (match === '%%') { - return; - } - index++; - if (match === '%c') { - // We only are interested in the *last* %c - // (the user may have provided their own) - lastC = index; - } - }); +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0; + +/** `Object#toString` result references. */ +var symbolTag = '[object Symbol]'; + +/** Used to match words composed of alphanumeric characters. */ +var reAsciiWord = /[^\x00-\x2f\x3a-\x40\x5b-\x60\x7b-\x7f]+/g; + +/** Used to match Latin Unicode letters (excluding mathematical operators). */ +var reLatin = /[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g; + +/** Used to compose unicode character classes. */ +var rsAstralRange = '\\ud800-\\udfff', + rsComboMarksRange = '\\u0300-\\u036f\\ufe20-\\ufe23', + rsComboSymbolsRange = '\\u20d0-\\u20f0', + rsDingbatRange = '\\u2700-\\u27bf', + rsLowerRange = 'a-z\\xdf-\\xf6\\xf8-\\xff', + rsMathOpRange = '\\xac\\xb1\\xd7\\xf7', + rsNonCharRange = '\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf', + rsPunctuationRange = '\\u2000-\\u206f', + rsSpaceRange = ' \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000', + rsUpperRange = 'A-Z\\xc0-\\xd6\\xd8-\\xde', + rsVarRange = '\\ufe0e\\ufe0f', + rsBreakRange = rsMathOpRange + rsNonCharRange + rsPunctuationRange + rsSpaceRange; + +/** Used to compose unicode capture groups. 
*/ +var rsApos = "['\u2019]", + rsAstral = '[' + rsAstralRange + ']', + rsBreak = '[' + rsBreakRange + ']', + rsCombo = '[' + rsComboMarksRange + rsComboSymbolsRange + ']', + rsDigits = '\\d+', + rsDingbat = '[' + rsDingbatRange + ']', + rsLower = '[' + rsLowerRange + ']', + rsMisc = '[^' + rsAstralRange + rsBreakRange + rsDigits + rsDingbatRange + rsLowerRange + rsUpperRange + ']', + rsFitz = '\\ud83c[\\udffb-\\udfff]', + rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')', + rsNonAstral = '[^' + rsAstralRange + ']', + rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}', + rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]', + rsUpper = '[' + rsUpperRange + ']', + rsZWJ = '\\u200d'; + +/** Used to compose unicode regexes. */ +var rsLowerMisc = '(?:' + rsLower + '|' + rsMisc + ')', + rsUpperMisc = '(?:' + rsUpper + '|' + rsMisc + ')', + rsOptLowerContr = '(?:' + rsApos + '(?:d|ll|m|re|s|t|ve))?', + rsOptUpperContr = '(?:' + rsApos + '(?:D|LL|M|RE|S|T|VE))?', + reOptMod = rsModifier + '?', + rsOptVar = '[' + rsVarRange + ']?', + rsOptJoin = '(?:' + rsZWJ + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*', + rsSeq = rsOptVar + reOptMod + rsOptJoin, + rsEmoji = '(?:' + [rsDingbat, rsRegional, rsSurrPair].join('|') + ')' + rsSeq, + rsSymbol = '(?:' + [rsNonAstral + rsCombo + '?', rsCombo, rsRegional, rsSurrPair, rsAstral].join('|') + ')'; + +/** Used to match apostrophes. */ +var reApos = RegExp(rsApos, 'g'); + +/** + * Used to match [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks) and + * [combining diacritical marks for symbols](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks_for_Symbols). + */ +var reComboMark = RegExp(rsCombo, 'g'); + +/** Used to match [string symbols](https://mathiasbynens.be/notes/javascript-unicode). */ +var reUnicode = RegExp(rsFitz + '(?=' + rsFitz + ')|' + rsSymbol + rsSeq, 'g'); + +/** Used to match complex or compound words. */ +var reUnicodeWord = RegExp([ + rsUpper + '?' + rsLower + '+' + rsOptLowerContr + '(?=' + [rsBreak, rsUpper, '$'].join('|') + ')', + rsUpperMisc + '+' + rsOptUpperContr + '(?=' + [rsBreak, rsUpper + rsLowerMisc, '$'].join('|') + ')', + rsUpper + '?' + rsLowerMisc + '+' + rsOptLowerContr, + rsUpper + '+' + rsOptUpperContr, + rsDigits, + rsEmoji +].join('|'), 'g'); + +/** Used to detect strings with [zero-width joiners or code points from the astral planes](http://eev.ee/blog/2015/09/12/dark-corners-of-unicode/). */ +var reHasUnicode = RegExp('[' + rsZWJ + rsAstralRange + rsComboMarksRange + rsComboSymbolsRange + rsVarRange + ']'); + +/** Used to detect strings that need a more robust regexp to match words. */ +var reHasUnicodeWord = /[a-z][A-Z]|[A-Z]{2,}[a-z]|[0-9][a-zA-Z]|[a-zA-Z][0-9]|[^a-zA-Z0-9 ]/; + +/** Used to map Latin Unicode letters to basic Latin letters. */ +var deburredLetters = { + // Latin-1 Supplement block. 
+ '\xc0': 'A', '\xc1': 'A', '\xc2': 'A', '\xc3': 'A', '\xc4': 'A', '\xc5': 'A', + '\xe0': 'a', '\xe1': 'a', '\xe2': 'a', '\xe3': 'a', '\xe4': 'a', '\xe5': 'a', + '\xc7': 'C', '\xe7': 'c', + '\xd0': 'D', '\xf0': 'd', + '\xc8': 'E', '\xc9': 'E', '\xca': 'E', '\xcb': 'E', + '\xe8': 'e', '\xe9': 'e', '\xea': 'e', '\xeb': 'e', + '\xcc': 'I', '\xcd': 'I', '\xce': 'I', '\xcf': 'I', + '\xec': 'i', '\xed': 'i', '\xee': 'i', '\xef': 'i', + '\xd1': 'N', '\xf1': 'n', + '\xd2': 'O', '\xd3': 'O', '\xd4': 'O', '\xd5': 'O', '\xd6': 'O', '\xd8': 'O', + '\xf2': 'o', '\xf3': 'o', '\xf4': 'o', '\xf5': 'o', '\xf6': 'o', '\xf8': 'o', + '\xd9': 'U', '\xda': 'U', '\xdb': 'U', '\xdc': 'U', + '\xf9': 'u', '\xfa': 'u', '\xfb': 'u', '\xfc': 'u', + '\xdd': 'Y', '\xfd': 'y', '\xff': 'y', + '\xc6': 'Ae', '\xe6': 'ae', + '\xde': 'Th', '\xfe': 'th', + '\xdf': 'ss', + // Latin Extended-A block. + '\u0100': 'A', '\u0102': 'A', '\u0104': 'A', + '\u0101': 'a', '\u0103': 'a', '\u0105': 'a', + '\u0106': 'C', '\u0108': 'C', '\u010a': 'C', '\u010c': 'C', + '\u0107': 'c', '\u0109': 'c', '\u010b': 'c', '\u010d': 'c', + '\u010e': 'D', '\u0110': 'D', '\u010f': 'd', '\u0111': 'd', + '\u0112': 'E', '\u0114': 'E', '\u0116': 'E', '\u0118': 'E', '\u011a': 'E', + '\u0113': 'e', '\u0115': 'e', '\u0117': 'e', '\u0119': 'e', '\u011b': 'e', + '\u011c': 'G', '\u011e': 'G', '\u0120': 'G', '\u0122': 'G', + '\u011d': 'g', '\u011f': 'g', '\u0121': 'g', '\u0123': 'g', + '\u0124': 'H', '\u0126': 'H', '\u0125': 'h', '\u0127': 'h', + '\u0128': 'I', '\u012a': 'I', '\u012c': 'I', '\u012e': 'I', '\u0130': 'I', + '\u0129': 'i', '\u012b': 'i', '\u012d': 'i', '\u012f': 'i', '\u0131': 'i', + '\u0134': 'J', '\u0135': 'j', + '\u0136': 'K', '\u0137': 'k', '\u0138': 'k', + '\u0139': 'L', '\u013b': 'L', '\u013d': 'L', '\u013f': 'L', '\u0141': 'L', + '\u013a': 'l', '\u013c': 'l', '\u013e': 'l', '\u0140': 'l', '\u0142': 'l', + '\u0143': 'N', '\u0145': 'N', '\u0147': 'N', '\u014a': 'N', + '\u0144': 'n', '\u0146': 'n', '\u0148': 'n', '\u014b': 'n', + '\u014c': 'O', '\u014e': 'O', '\u0150': 'O', + '\u014d': 'o', '\u014f': 'o', '\u0151': 'o', + '\u0154': 'R', '\u0156': 'R', '\u0158': 'R', + '\u0155': 'r', '\u0157': 'r', '\u0159': 'r', + '\u015a': 'S', '\u015c': 'S', '\u015e': 'S', '\u0160': 'S', + '\u015b': 's', '\u015d': 's', '\u015f': 's', '\u0161': 's', + '\u0162': 'T', '\u0164': 'T', '\u0166': 'T', + '\u0163': 't', '\u0165': 't', '\u0167': 't', + '\u0168': 'U', '\u016a': 'U', '\u016c': 'U', '\u016e': 'U', '\u0170': 'U', '\u0172': 'U', + '\u0169': 'u', '\u016b': 'u', '\u016d': 'u', '\u016f': 'u', '\u0171': 'u', '\u0173': 'u', + '\u0174': 'W', '\u0175': 'w', + '\u0176': 'Y', '\u0177': 'y', '\u0178': 'Y', + '\u0179': 'Z', '\u017b': 'Z', '\u017d': 'Z', + '\u017a': 'z', '\u017c': 'z', '\u017e': 'z', + '\u0132': 'IJ', '\u0133': 'ij', + '\u0152': 'Oe', '\u0153': 'oe', + '\u0149': "'n", '\u017f': 'ss' +}; + +/** Detect free variable `global` from Node.js. */ +var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; + +/** Detect free variable `self`. */ +var freeSelf = typeof self == 'object' && self && self.Object === Object && self; + +/** Used as a reference to the global object. */ +var root = freeGlobal || freeSelf || Function('return this')(); + +/** + * A specialized version of `_.reduce` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} [accumulator] The initial value. 
+ * @param {boolean} [initAccum] Specify using the first element of `array` as + * the initial value. + * @returns {*} Returns the accumulated value. + */ +function arrayReduce(array, iteratee, accumulator, initAccum) { + var index = -1, + length = array ? array.length : 0; - args.splice(lastC, 0, c); + if (initAccum && length) { + accumulator = array[++index]; + } + while (++index < length) { + accumulator = iteratee(accumulator, array[index], index, array); + } + return accumulator; } /** - * Invokes `console.debug()` when available. - * No-op when `console.debug` is not a "function". - * If `console.debug` is not available, falls back - * to `console.log`. + * Converts an ASCII `string` to an array. * - * @api public + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. */ -exports.log = console.debug || console.log || (() => {}); +function asciiToArray(string) { + return string.split(''); +} /** - * Save `namespaces`. + * Splits an ASCII `string` into an array of its words. * - * @param {String} namespaces - * @api private + * @private + * @param {string} The string to inspect. + * @returns {Array} Returns the words of `string`. */ -function save(namespaces) { - try { - if (namespaces) { - exports.storage.setItem('debug', namespaces); - } else { - exports.storage.removeItem('debug'); - } - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } +function asciiWords(string) { + return string.match(reAsciiWord) || []; } /** - * Load `namespaces`. + * The base implementation of `_.propertyOf` without support for deep paths. * - * @return {String} returns the previously persisted debug modes - * @api private + * @private + * @param {Object} object The object to query. + * @returns {Function} Returns the new accessor function. */ -function load() { - let r; - try { - r = exports.storage.getItem('debug'); - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } - - // If debug isn't set in LS, and we're in Electron, try to load $DEBUG - if (!r && typeof process !== 'undefined' && 'env' in process) { - r = process.env.DEBUG; - } - - return r; +function basePropertyOf(object) { + return function(key) { + return object == null ? undefined : object[key]; + }; } /** - * Localstorage attempts to return the localstorage. + * Used by `_.deburr` to convert Latin-1 Supplement and Latin Extended-A + * letters to basic Latin letters. * - * This is necessary because safari throws - * when a user disables cookies/localstorage - * and you attempt to access it. - * - * @return {LocalStorage} - * @api private + * @private + * @param {string} letter The matched letter to deburr. + * @returns {string} Returns the deburred letter. */ +var deburrLetter = basePropertyOf(deburredLetters); -function localstorage() { - try { - // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context - // The Browser also has localStorage in the global context. - return localStorage; - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } +/** + * Checks if `string` contains Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a symbol is found, else `false`. 
+ */ +function hasUnicode(string) { + return reHasUnicode.test(string); } -module.exports = __nccwpck_require__(46243)(exports); - -const {formatters} = module.exports; - /** - * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + * Checks if `string` contains a word composed of Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a word is found, else `false`. */ +function hasUnicodeWord(string) { + return reHasUnicodeWord.test(string); +} -formatters.j = function (v) { - try { - return JSON.stringify(v); - } catch (error) { - return '[UnexpectedJSONParseError]: ' + error.message; - } -}; - +/** + * Converts `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ +function stringToArray(string) { + return hasUnicode(string) + ? unicodeToArray(string) + : asciiToArray(string); +} -/***/ }), +/** + * Converts a Unicode `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ +function unicodeToArray(string) { + return string.match(reUnicode) || []; +} -/***/ 46243: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/** + * Splits a Unicode `string` into an array of its words. + * + * @private + * @param {string} The string to inspect. + * @returns {Array} Returns the words of `string`. + */ +function unicodeWords(string) { + return string.match(reUnicodeWord) || []; +} +/** Used for built-in method references. */ +var objectProto = Object.prototype; /** - * This is the common logic for both the Node.js and web browser - * implementations of `debug()`. + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. */ +var objectToString = objectProto.toString; -function setup(env) { - createDebug.debug = createDebug; - createDebug.default = createDebug; - createDebug.coerce = coerce; - createDebug.disable = disable; - createDebug.enable = enable; - createDebug.enabled = enabled; - createDebug.humanize = __nccwpck_require__(84697); - createDebug.destroy = destroy; +/** Built-in value references. */ +var Symbol = root.Symbol; - Object.keys(env).forEach(key => { - createDebug[key] = env[key]; - }); - - /** - * The currently active debug mode names, and names to skip. - */ +/** Used to convert symbols to primitives and strings. */ +var symbolProto = Symbol ? Symbol.prototype : undefined, + symbolToString = symbolProto ? symbolProto.toString : undefined; - createDebug.names = []; - createDebug.skips = []; +/** + * The base implementation of `_.slice` without an iteratee call guard. + * + * @private + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ +function baseSlice(array, start, end) { + var index = -1, + length = array.length; - /** - * Map of special "%n" handling functions, for the debug "format" argument. - * - * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". - */ - createDebug.formatters = {}; + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = end > length ? length : end; + if (end < 0) { + end += length; + } + length = start > end ? 
0 : ((end - start) >>> 0); + start >>>= 0; - /** - * Selects a color for a debug namespace - * @param {String} namespace The namespace string for the debug instance to be colored - * @return {Number|String} An ANSI color code for the given namespace - * @api private - */ - function selectColor(namespace) { - let hash = 0; + var result = Array(length); + while (++index < length) { + result[index] = array[index + start]; + } + return result; +} - for (let i = 0; i < namespace.length; i++) { - hash = ((hash << 5) - hash) + namespace.charCodeAt(i); - hash |= 0; // Convert to 32bit integer - } +/** + * The base implementation of `_.toString` which doesn't convert nullish + * values to empty strings. + * + * @private + * @param {*} value The value to process. + * @returns {string} Returns the string. + */ +function baseToString(value) { + // Exit early for strings to avoid a performance hit in some environments. + if (typeof value == 'string') { + return value; + } + if (isSymbol(value)) { + return symbolToString ? symbolToString.call(value) : ''; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; +} - return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; - } - createDebug.selectColor = selectColor; +/** + * Casts `array` to a slice if it's needed. + * + * @private + * @param {Array} array The array to inspect. + * @param {number} start The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the cast slice. + */ +function castSlice(array, start, end) { + var length = array.length; + end = end === undefined ? length : end; + return (!start && end >= length) ? array : baseSlice(array, start, end); +} - /** - * Create a debugger with the given `namespace`. - * - * @param {String} namespace - * @return {Function} - * @api public - */ - function createDebug(namespace) { - let prevTime; - let enableOverride = null; - let namespacesCache; - let enabledCache; +/** + * Creates a function like `_.lowerFirst`. + * + * @private + * @param {string} methodName The name of the `String` case method to use. + * @returns {Function} Returns the new case function. + */ +function createCaseFirst(methodName) { + return function(string) { + string = toString(string); - function debug(...args) { - // Disabled? - if (!debug.enabled) { - return; - } + var strSymbols = hasUnicode(string) + ? stringToArray(string) + : undefined; - const self = debug; + var chr = strSymbols + ? strSymbols[0] + : string.charAt(0); - // Set `diff` timestamp - const curr = Number(new Date()); - const ms = curr - (prevTime || curr); - self.diff = ms; - self.prev = prevTime; - self.curr = curr; - prevTime = curr; + var trailing = strSymbols + ? castSlice(strSymbols, 1).join('') + : string.slice(1); - args[0] = createDebug.coerce(args[0]); + return chr[methodName]() + trailing; + }; +} - if (typeof args[0] !== 'string') { - // Anything else let's inspect with %O - args.unshift('%O'); - } +/** + * Creates a function like `_.camelCase`. + * + * @private + * @param {Function} callback The function to combine each word. + * @returns {Function} Returns the new compounder function. 
+ */ +function createCompounder(callback) { + return function(string) { + return arrayReduce(words(deburr(string).replace(reApos, '')), callback, ''); + }; +} - // Apply any `formatters` transformations - let index = 0; - args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { - // If we encounter an escaped % then don't increase the array index - if (match === '%%') { - return '%'; - } - index++; - const formatter = createDebug.formatters[format]; - if (typeof formatter === 'function') { - const val = args[index]; - match = formatter.call(self, val); +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return !!value && typeof value == 'object'; +} - // Now we need to remove `args[index]` since it's inlined in the `format` - args.splice(index, 1); - index--; - } - return match; - }); +/** + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. + * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false + */ +function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && objectToString.call(value) == symbolTag); +} - // Apply env-specific formatting (colors, etc.) - createDebug.formatArgs.call(self, args); +/** + * Converts `value` to a string. An empty string is returned for `null` + * and `undefined` values. The sign of `-0` is preserved. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to process. + * @returns {string} Returns the string. + * @example + * + * _.toString(null); + * // => '' + * + * _.toString(-0); + * // => '-0' + * + * _.toString([1, 2, 3]); + * // => '1,2,3' + */ +function toString(value) { + return value == null ? '' : baseToString(value); +} - const logFn = self.log || createDebug.log; - logFn.apply(self, args); - } +/** + * Converts `string` to [camel case](https://en.wikipedia.org/wiki/CamelCase). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the camel cased string. + * @example + * + * _.camelCase('Foo Bar'); + * // => 'fooBar' + * + * _.camelCase('--foo-bar--'); + * // => 'fooBar' + * + * _.camelCase('__FOO_BAR__'); + * // => 'fooBar' + */ +var camelCase = createCompounder(function(result, word, index) { + word = word.toLowerCase(); + return result + (index ? capitalize(word) : word); +}); - debug.namespace = namespace; - debug.useColors = createDebug.useColors(); - debug.color = createDebug.selectColor(namespace); - debug.extend = extend; - debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. +/** + * Converts the first character of `string` to upper case and the remaining + * to lower case. 
+ * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to capitalize. + * @returns {string} Returns the capitalized string. + * @example + * + * _.capitalize('FRED'); + * // => 'Fred' + */ +function capitalize(string) { + return upperFirst(toString(string).toLowerCase()); +} - Object.defineProperty(debug, 'enabled', { - enumerable: true, - configurable: false, - get: () => { - if (enableOverride !== null) { - return enableOverride; - } - if (namespacesCache !== createDebug.namespaces) { - namespacesCache = createDebug.namespaces; - enabledCache = createDebug.enabled(namespace); - } +/** + * Deburrs `string` by converting + * [Latin-1 Supplement](https://en.wikipedia.org/wiki/Latin-1_Supplement_(Unicode_block)#Character_table) + * and [Latin Extended-A](https://en.wikipedia.org/wiki/Latin_Extended-A) + * letters to basic Latin letters and removing + * [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to deburr. + * @returns {string} Returns the deburred string. + * @example + * + * _.deburr('déjà vu'); + * // => 'deja vu' + */ +function deburr(string) { + string = toString(string); + return string && string.replace(reLatin, deburrLetter).replace(reComboMark, ''); +} - return enabledCache; - }, - set: v => { - enableOverride = v; - } - }); +/** + * Converts the first character of `string` to upper case. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.upperFirst('fred'); + * // => 'Fred' + * + * _.upperFirst('FRED'); + * // => 'FRED' + */ +var upperFirst = createCaseFirst('toUpperCase'); - // Env-specific initialization logic for debug instances - if (typeof createDebug.init === 'function') { - createDebug.init(debug); - } +/** + * Splits `string` into an array of its words. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to inspect. + * @param {RegExp|string} [pattern] The pattern to match words. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the words of `string`. + * @example + * + * _.words('fred, barney, & pebbles'); + * // => ['fred', 'barney', 'pebbles'] + * + * _.words('fred, barney, & pebbles', /[^, ]+/g); + * // => ['fred', 'barney', '&', 'pebbles'] + */ +function words(string, pattern, guard) { + string = toString(string); + pattern = guard ? undefined : pattern; - return debug; - } + if (pattern === undefined) { + return hasUnicodeWord(string) ? unicodeWords(string) : asciiWords(string); + } + return string.match(pattern) || []; +} - function extend(namespace, delimiter) { - const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); - newDebug.log = this.log; - return newDebug; - } +module.exports = camelCase; - /** - * Enables a debug mode by namespaces. This can include modes - * separated by a colon and wildcards. - * - * @param {String} namespaces - * @api public - */ - function enable(namespaces) { - createDebug.save(namespaces); - createDebug.namespaces = namespaces; - createDebug.names = []; - createDebug.skips = []; +/***/ }), - let i; - const split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); - const len = split.length; +/***/ 90250: +/***/ (function(module, exports, __nccwpck_require__) { - for (i = 0; i < len; i++) { - if (!split[i]) { - // ignore empty strings - continue; - } +/* module decorator */ module = __nccwpck_require__.nmd(module); +/** + * @license + * Lodash + * Copyright OpenJS Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ +;(function() { - namespaces = split[i].replace(/\*/g, '.*?'); + /** Used as a safe reference for `undefined` in pre-ES5 environments. */ + var undefined; - if (namespaces[0] === '-') { - createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); - } else { - createDebug.names.push(new RegExp('^' + namespaces + '$')); - } - } - } + /** Used as the semantic version number. */ + var VERSION = '4.17.21'; - /** - * Disable debug output. - * - * @return {String} namespaces - * @api public - */ - function disable() { - const namespaces = [ - ...createDebug.names.map(toNamespace), - ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) - ].join(','); - createDebug.enable(''); - return namespaces; - } + /** Used as the size to enable large array optimizations. */ + var LARGE_ARRAY_SIZE = 200; - /** - * Returns true if the given mode name is enabled, false otherwise. - * - * @param {String} name - * @return {Boolean} - * @api public - */ - function enabled(name) { - if (name[name.length - 1] === '*') { - return true; - } + /** Error message constants. */ + var CORE_ERROR_TEXT = 'Unsupported core-js use. Try https://npms.io/search?q=ponyfill.', + FUNC_ERROR_TEXT = 'Expected a function', + INVALID_TEMPL_VAR_ERROR_TEXT = 'Invalid `variable` option passed into `_.template`'; - let i; - let len; + /** Used to stand-in for `undefined` hash values. */ + var HASH_UNDEFINED = '__lodash_hash_undefined__'; - for (i = 0, len = createDebug.skips.length; i < len; i++) { - if (createDebug.skips[i].test(name)) { - return false; - } - } + /** Used as the maximum memoize cache size. */ + var MAX_MEMOIZE_SIZE = 500; - for (i = 0, len = createDebug.names.length; i < len; i++) { - if (createDebug.names[i].test(name)) { - return true; - } - } + /** Used as the internal argument placeholder. */ + var PLACEHOLDER = '__lodash_placeholder__'; - return false; - } + /** Used to compose bitmasks for cloning. */ + var CLONE_DEEP_FLAG = 1, + CLONE_FLAT_FLAG = 2, + CLONE_SYMBOLS_FLAG = 4; - /** - * Convert regexp to namespace - * - * @param {RegExp} regxep - * @return {String} namespace - * @api private - */ - function toNamespace(regexp) { - return regexp.toString() - .substring(2, regexp.toString().length - 2) - .replace(/\.\*\?$/, '*'); - } + /** Used to compose bitmasks for value comparisons. */ + var COMPARE_PARTIAL_FLAG = 1, + COMPARE_UNORDERED_FLAG = 2; - /** - * Coerce `val`. - * - * @param {Mixed} val - * @return {Mixed} - * @api private - */ - function coerce(val) { - if (val instanceof Error) { - return val.stack || val.message; - } - return val; - } + /** Used to compose bitmasks for function metadata. */ + var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_CURRY_BOUND_FLAG = 4, + WRAP_CURRY_FLAG = 8, + WRAP_CURRY_RIGHT_FLAG = 16, + WRAP_PARTIAL_FLAG = 32, + WRAP_PARTIAL_RIGHT_FLAG = 64, + WRAP_ARY_FLAG = 128, + WRAP_REARG_FLAG = 256, + WRAP_FLIP_FLAG = 512; - /** - * XXX DO NOT USE. This is a temporary stub function. 
- * XXX It WILL be removed in the next major release. - */ - function destroy() { - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } + /** Used as default options for `_.truncate`. */ + var DEFAULT_TRUNC_LENGTH = 30, + DEFAULT_TRUNC_OMISSION = '...'; - createDebug.enable(createDebug.load()); + /** Used to detect hot functions by number of calls within a span of milliseconds. */ + var HOT_COUNT = 800, + HOT_SPAN = 16; - return createDebug; -} + /** Used to indicate the type of lazy iteratees. */ + var LAZY_FILTER_FLAG = 1, + LAZY_MAP_FLAG = 2, + LAZY_WHILE_FLAG = 3; -module.exports = setup; + /** Used as references for various `Number` constants. */ + var INFINITY = 1 / 0, + MAX_SAFE_INTEGER = 9007199254740991, + MAX_INTEGER = 1.7976931348623157e+308, + NAN = 0 / 0; + /** Used as references for the maximum length and index of an array. */ + var MAX_ARRAY_LENGTH = 4294967295, + MAX_ARRAY_INDEX = MAX_ARRAY_LENGTH - 1, + HALF_MAX_ARRAY_LENGTH = MAX_ARRAY_LENGTH >>> 1; -/***/ }), + /** Used to associate wrap methods with their bit flags. */ + var wrapFlags = [ + ['ary', WRAP_ARY_FLAG], + ['bind', WRAP_BIND_FLAG], + ['bindKey', WRAP_BIND_KEY_FLAG], + ['curry', WRAP_CURRY_FLAG], + ['curryRight', WRAP_CURRY_RIGHT_FLAG], + ['flip', WRAP_FLIP_FLAG], + ['partial', WRAP_PARTIAL_FLAG], + ['partialRight', WRAP_PARTIAL_RIGHT_FLAG], + ['rearg', WRAP_REARG_FLAG] + ]; -/***/ 38237: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** `Object#toString` result references. */ + var argsTag = '[object Arguments]', + arrayTag = '[object Array]', + asyncTag = '[object AsyncFunction]', + boolTag = '[object Boolean]', + dateTag = '[object Date]', + domExcTag = '[object DOMException]', + errorTag = '[object Error]', + funcTag = '[object Function]', + genTag = '[object GeneratorFunction]', + mapTag = '[object Map]', + numberTag = '[object Number]', + nullTag = '[object Null]', + objectTag = '[object Object]', + promiseTag = '[object Promise]', + proxyTag = '[object Proxy]', + regexpTag = '[object RegExp]', + setTag = '[object Set]', + stringTag = '[object String]', + symbolTag = '[object Symbol]', + undefinedTag = '[object Undefined]', + weakMapTag = '[object WeakMap]', + weakSetTag = '[object WeakSet]'; -/** - * Detect Electron renderer / nwjs process, which is node, but we should - * treat as a browser. - */ + var arrayBufferTag = '[object ArrayBuffer]', + dataViewTag = '[object DataView]', + float32Tag = '[object Float32Array]', + float64Tag = '[object Float64Array]', + int8Tag = '[object Int8Array]', + int16Tag = '[object Int16Array]', + int32Tag = '[object Int32Array]', + uint8Tag = '[object Uint8Array]', + uint8ClampedTag = '[object Uint8ClampedArray]', + uint16Tag = '[object Uint16Array]', + uint32Tag = '[object Uint32Array]'; -if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { - module.exports = __nccwpck_require__(28222); -} else { - module.exports = __nccwpck_require__(35332); -} + /** Used to match empty string literals in compiled template source. */ + var reEmptyStringLeading = /\b__p \+= '';/g, + reEmptyStringMiddle = /\b(__p \+=) '' \+/g, + reEmptyStringTrailing = /(__e\(.*?\)|\b__t\)) \+\n'';/g; + /** Used to match HTML entities and HTML characters. 
*/ + var reEscapedHtml = /&(?:amp|lt|gt|quot|#39);/g, + reUnescapedHtml = /[&<>"']/g, + reHasEscapedHtml = RegExp(reEscapedHtml.source), + reHasUnescapedHtml = RegExp(reUnescapedHtml.source); -/***/ }), + /** Used to match template delimiters. */ + var reEscape = /<%-([\s\S]+?)%>/g, + reEvaluate = /<%([\s\S]+?)%>/g, + reInterpolate = /<%=([\s\S]+?)%>/g; -/***/ 35332: -/***/ ((module, exports, __nccwpck_require__) => { + /** Used to match property names within property paths. */ + var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, + reIsPlainProp = /^\w*$/, + rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; -/** - * Module dependencies. - */ + /** + * Used to match `RegExp` + * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). + */ + var reRegExpChar = /[\\^$.*+?()[\]{}|]/g, + reHasRegExpChar = RegExp(reRegExpChar.source); -const tty = __nccwpck_require__(76224); -const util = __nccwpck_require__(73837); + /** Used to match leading whitespace. */ + var reTrimStart = /^\s+/; -/** - * This is the Node.js implementation of `debug()`. - */ + /** Used to match a single whitespace character. */ + var reWhitespace = /\s/; -exports.init = init; -exports.log = log; -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.destroy = util.deprecate( - () => {}, - 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' -); + /** Used to match wrap detail comments. */ + var reWrapComment = /\{(?:\n\/\* \[wrapped with .+\] \*\/)?\n?/, + reWrapDetails = /\{\n\/\* \[wrapped with (.+)\] \*/, + reSplitDetails = /,? & /; -/** - * Colors. - */ + /** Used to match words composed of alphanumeric characters. */ + var reAsciiWord = /[^\x00-\x2f\x3a-\x40\x5b-\x60\x7b-\x7f]+/g; -exports.colors = [6, 2, 3, 4, 5, 1]; + /** + * Used to validate the `validate` option in `_.template` variable. + * + * Forbids characters which could potentially change the meaning of the function argument definition: + * - "()," (modification of function parameters) + * - "=" (default value) + * - "[]{}" (destructuring of function parameters) + * - "/" (beginning of a comment) + * - whitespace + */ + var reForbiddenIdentifierChars = /[()=,{}\[\]\/\s]/; -try { - // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) - // eslint-disable-next-line import/no-extraneous-dependencies - const supportsColor = __nccwpck_require__(59318); + /** Used to match backslashes in property paths. */ + var reEscapeChar = /\\(\\)?/g; - if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { - exports.colors = [ - 20, - 21, - 26, - 27, - 32, - 33, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 56, - 57, - 62, - 63, - 68, - 69, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 92, - 93, - 98, - 99, - 112, - 113, - 128, - 129, - 134, - 135, - 148, - 149, - 160, - 161, - 162, - 163, - 164, - 165, - 166, - 167, - 168, - 169, - 170, - 171, - 172, - 173, - 178, - 179, - 184, - 185, - 196, - 197, - 198, - 199, - 200, - 201, - 202, - 203, - 204, - 205, - 206, - 207, - 208, - 209, - 214, - 215, - 220, - 221 - ]; - } -} catch (error) { - // Swallow - we only care if `supports-color` is available; it doesn't have to be. 
-} + /** + * Used to match + * [ES template delimiters](http://ecma-international.org/ecma-262/7.0/#sec-template-literal-lexical-components). + */ + var reEsTemplate = /\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g; -/** - * Build up the default `inspectOpts` object from the environment variables. - * - * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js - */ + /** Used to match `RegExp` flags from their coerced string values. */ + var reFlags = /\w*$/; -exports.inspectOpts = Object.keys(process.env).filter(key => { - return /^debug_/i.test(key); -}).reduce((obj, key) => { - // Camel-case - const prop = key - .substring(6) - .toLowerCase() - .replace(/_([a-z])/g, (_, k) => { - return k.toUpperCase(); - }); + /** Used to detect bad signed hexadecimal string values. */ + var reIsBadHex = /^[-+]0x[0-9a-f]+$/i; - // Coerce string value into JS value - let val = process.env[key]; - if (/^(yes|on|true|enabled)$/i.test(val)) { - val = true; - } else if (/^(no|off|false|disabled)$/i.test(val)) { - val = false; - } else if (val === 'null') { - val = null; - } else { - val = Number(val); - } + /** Used to detect binary string values. */ + var reIsBinary = /^0b[01]+$/i; - obj[prop] = val; - return obj; -}, {}); + /** Used to detect host constructors (Safari). */ + var reIsHostCtor = /^\[object .+?Constructor\]$/; -/** - * Is stdout a TTY? Colored output is enabled when `true`. - */ + /** Used to detect octal string values. */ + var reIsOctal = /^0o[0-7]+$/i; -function useColors() { - return 'colors' in exports.inspectOpts ? - Boolean(exports.inspectOpts.colors) : - tty.isatty(process.stderr.fd); -} + /** Used to detect unsigned integer values. */ + var reIsUint = /^(?:0|[1-9]\d*)$/; -/** - * Adds ANSI color escape codes if enabled. - * - * @api public - */ + /** Used to match Latin Unicode letters (excluding mathematical operators). */ + var reLatin = /[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g; -function formatArgs(args) { - const {namespace: name, useColors} = this; + /** Used to ensure capturing order of template delimiters. */ + var reNoMatch = /($^)/; - if (useColors) { - const c = this.color; - const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); - const prefix = ` ${colorCode};1m${name} \u001B[0m`; + /** Used to match unescaped characters in compiled string literals. */ + var reUnescapedString = /['\n\r\u2028\u2029\\]/g; - args[0] = prefix + args[0].split('\n').join('\n' + prefix); - args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); - } else { - args[0] = getDate() + name + ' ' + args[0]; - } -} + /** Used to compose unicode character classes. 
*/ + var rsAstralRange = '\\ud800-\\udfff', + rsComboMarksRange = '\\u0300-\\u036f', + reComboHalfMarksRange = '\\ufe20-\\ufe2f', + rsComboSymbolsRange = '\\u20d0-\\u20ff', + rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange, + rsDingbatRange = '\\u2700-\\u27bf', + rsLowerRange = 'a-z\\xdf-\\xf6\\xf8-\\xff', + rsMathOpRange = '\\xac\\xb1\\xd7\\xf7', + rsNonCharRange = '\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf', + rsPunctuationRange = '\\u2000-\\u206f', + rsSpaceRange = ' \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000', + rsUpperRange = 'A-Z\\xc0-\\xd6\\xd8-\\xde', + rsVarRange = '\\ufe0e\\ufe0f', + rsBreakRange = rsMathOpRange + rsNonCharRange + rsPunctuationRange + rsSpaceRange; -function getDate() { - if (exports.inspectOpts.hideDate) { - return ''; - } - return new Date().toISOString() + ' '; -} + /** Used to compose unicode capture groups. */ + var rsApos = "['\u2019]", + rsAstral = '[' + rsAstralRange + ']', + rsBreak = '[' + rsBreakRange + ']', + rsCombo = '[' + rsComboRange + ']', + rsDigits = '\\d+', + rsDingbat = '[' + rsDingbatRange + ']', + rsLower = '[' + rsLowerRange + ']', + rsMisc = '[^' + rsAstralRange + rsBreakRange + rsDigits + rsDingbatRange + rsLowerRange + rsUpperRange + ']', + rsFitz = '\\ud83c[\\udffb-\\udfff]', + rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')', + rsNonAstral = '[^' + rsAstralRange + ']', + rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}', + rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]', + rsUpper = '[' + rsUpperRange + ']', + rsZWJ = '\\u200d'; -/** - * Invokes `util.format()` with the specified arguments and writes to stderr. - */ + /** Used to compose unicode regexes. */ + var rsMiscLower = '(?:' + rsLower + '|' + rsMisc + ')', + rsMiscUpper = '(?:' + rsUpper + '|' + rsMisc + ')', + rsOptContrLower = '(?:' + rsApos + '(?:d|ll|m|re|s|t|ve))?', + rsOptContrUpper = '(?:' + rsApos + '(?:D|LL|M|RE|S|T|VE))?', + reOptMod = rsModifier + '?', + rsOptVar = '[' + rsVarRange + ']?', + rsOptJoin = '(?:' + rsZWJ + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*', + rsOrdLower = '\\d*(?:1st|2nd|3rd|(?![123])\\dth)(?=\\b|[A-Z_])', + rsOrdUpper = '\\d*(?:1ST|2ND|3RD|(?![123])\\dTH)(?=\\b|[a-z_])', + rsSeq = rsOptVar + reOptMod + rsOptJoin, + rsEmoji = '(?:' + [rsDingbat, rsRegional, rsSurrPair].join('|') + ')' + rsSeq, + rsSymbol = '(?:' + [rsNonAstral + rsCombo + '?', rsCombo, rsRegional, rsSurrPair, rsAstral].join('|') + ')'; -function log(...args) { - return process.stderr.write(util.format(...args) + '\n'); -} + /** Used to match apostrophes. */ + var reApos = RegExp(rsApos, 'g'); -/** - * Save `namespaces`. - * - * @param {String} namespaces - * @api private - */ -function save(namespaces) { - if (namespaces) { - process.env.DEBUG = namespaces; - } else { - // If you set a process.env field to null or undefined, it gets cast to the - // string 'null' or 'undefined'. Just delete instead. - delete process.env.DEBUG; - } -} + /** + * Used to match [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks) and + * [combining diacritical marks for symbols](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks_for_Symbols). + */ + var reComboMark = RegExp(rsCombo, 'g'); -/** - * Load `namespaces`. 
- * - * @return {String} returns the previously persisted debug modes - * @api private - */ + /** Used to match [string symbols](https://mathiasbynens.be/notes/javascript-unicode). */ + var reUnicode = RegExp(rsFitz + '(?=' + rsFitz + ')|' + rsSymbol + rsSeq, 'g'); -function load() { - return process.env.DEBUG; -} + /** Used to match complex or compound words. */ + var reUnicodeWord = RegExp([ + rsUpper + '?' + rsLower + '+' + rsOptContrLower + '(?=' + [rsBreak, rsUpper, '$'].join('|') + ')', + rsMiscUpper + '+' + rsOptContrUpper + '(?=' + [rsBreak, rsUpper + rsMiscLower, '$'].join('|') + ')', + rsUpper + '?' + rsMiscLower + '+' + rsOptContrLower, + rsUpper + '+' + rsOptContrUpper, + rsOrdUpper, + rsOrdLower, + rsDigits, + rsEmoji + ].join('|'), 'g'); -/** - * Init logic for `debug` instances. - * - * Create a new `inspectOpts` object in case `useColors` is set - * differently for a particular `debug` instance. - */ + /** Used to detect strings with [zero-width joiners or code points from the astral planes](http://eev.ee/blog/2015/09/12/dark-corners-of-unicode/). */ + var reHasUnicode = RegExp('[' + rsZWJ + rsAstralRange + rsComboRange + rsVarRange + ']'); -function init(debug) { - debug.inspectOpts = {}; + /** Used to detect strings that need a more robust regexp to match words. */ + var reHasUnicodeWord = /[a-z][A-Z]|[A-Z]{2}[a-z]|[0-9][a-zA-Z]|[a-zA-Z][0-9]|[^a-zA-Z0-9 ]/; - const keys = Object.keys(exports.inspectOpts); - for (let i = 0; i < keys.length; i++) { - debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; - } -} + /** Used to assign default `context` object properties. */ + var contextProps = [ + 'Array', 'Buffer', 'DataView', 'Date', 'Error', 'Float32Array', 'Float64Array', + 'Function', 'Int8Array', 'Int16Array', 'Int32Array', 'Map', 'Math', 'Object', + 'Promise', 'RegExp', 'Set', 'String', 'Symbol', 'TypeError', 'Uint8Array', + 'Uint8ClampedArray', 'Uint16Array', 'Uint32Array', 'WeakMap', + '_', 'clearTimeout', 'isFinite', 'parseInt', 'setTimeout' + ]; -module.exports = __nccwpck_require__(46243)(exports); + /** Used to make template sourceURLs easier to identify. */ + var templateCounter = -1; -const {formatters} = module.exports; + /** Used to identify `toStringTag` values of typed arrays. */ + var typedArrayTags = {}; + typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = + typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = + typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = + typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = + typedArrayTags[uint32Tag] = true; + typedArrayTags[argsTag] = typedArrayTags[arrayTag] = + typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = + typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = + typedArrayTags[errorTag] = typedArrayTags[funcTag] = + typedArrayTags[mapTag] = typedArrayTags[numberTag] = + typedArrayTags[objectTag] = typedArrayTags[regexpTag] = + typedArrayTags[setTag] = typedArrayTags[stringTag] = + typedArrayTags[weakMapTag] = false; -/** - * Map %o to `util.inspect()`, all on a single line. - */ + /** Used to identify `toStringTag` values supported by `_.clone`. 
*/ + var cloneableTags = {}; + cloneableTags[argsTag] = cloneableTags[arrayTag] = + cloneableTags[arrayBufferTag] = cloneableTags[dataViewTag] = + cloneableTags[boolTag] = cloneableTags[dateTag] = + cloneableTags[float32Tag] = cloneableTags[float64Tag] = + cloneableTags[int8Tag] = cloneableTags[int16Tag] = + cloneableTags[int32Tag] = cloneableTags[mapTag] = + cloneableTags[numberTag] = cloneableTags[objectTag] = + cloneableTags[regexpTag] = cloneableTags[setTag] = + cloneableTags[stringTag] = cloneableTags[symbolTag] = + cloneableTags[uint8Tag] = cloneableTags[uint8ClampedTag] = + cloneableTags[uint16Tag] = cloneableTags[uint32Tag] = true; + cloneableTags[errorTag] = cloneableTags[funcTag] = + cloneableTags[weakMapTag] = false; -formatters.o = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts) - .split('\n') - .map(str => str.trim()) - .join(' '); -}; + /** Used to map Latin Unicode letters to basic Latin letters. */ + var deburredLetters = { + // Latin-1 Supplement block. + '\xc0': 'A', '\xc1': 'A', '\xc2': 'A', '\xc3': 'A', '\xc4': 'A', '\xc5': 'A', + '\xe0': 'a', '\xe1': 'a', '\xe2': 'a', '\xe3': 'a', '\xe4': 'a', '\xe5': 'a', + '\xc7': 'C', '\xe7': 'c', + '\xd0': 'D', '\xf0': 'd', + '\xc8': 'E', '\xc9': 'E', '\xca': 'E', '\xcb': 'E', + '\xe8': 'e', '\xe9': 'e', '\xea': 'e', '\xeb': 'e', + '\xcc': 'I', '\xcd': 'I', '\xce': 'I', '\xcf': 'I', + '\xec': 'i', '\xed': 'i', '\xee': 'i', '\xef': 'i', + '\xd1': 'N', '\xf1': 'n', + '\xd2': 'O', '\xd3': 'O', '\xd4': 'O', '\xd5': 'O', '\xd6': 'O', '\xd8': 'O', + '\xf2': 'o', '\xf3': 'o', '\xf4': 'o', '\xf5': 'o', '\xf6': 'o', '\xf8': 'o', + '\xd9': 'U', '\xda': 'U', '\xdb': 'U', '\xdc': 'U', + '\xf9': 'u', '\xfa': 'u', '\xfb': 'u', '\xfc': 'u', + '\xdd': 'Y', '\xfd': 'y', '\xff': 'y', + '\xc6': 'Ae', '\xe6': 'ae', + '\xde': 'Th', '\xfe': 'th', + '\xdf': 'ss', + // Latin Extended-A block. 
+ '\u0100': 'A', '\u0102': 'A', '\u0104': 'A', + '\u0101': 'a', '\u0103': 'a', '\u0105': 'a', + '\u0106': 'C', '\u0108': 'C', '\u010a': 'C', '\u010c': 'C', + '\u0107': 'c', '\u0109': 'c', '\u010b': 'c', '\u010d': 'c', + '\u010e': 'D', '\u0110': 'D', '\u010f': 'd', '\u0111': 'd', + '\u0112': 'E', '\u0114': 'E', '\u0116': 'E', '\u0118': 'E', '\u011a': 'E', + '\u0113': 'e', '\u0115': 'e', '\u0117': 'e', '\u0119': 'e', '\u011b': 'e', + '\u011c': 'G', '\u011e': 'G', '\u0120': 'G', '\u0122': 'G', + '\u011d': 'g', '\u011f': 'g', '\u0121': 'g', '\u0123': 'g', + '\u0124': 'H', '\u0126': 'H', '\u0125': 'h', '\u0127': 'h', + '\u0128': 'I', '\u012a': 'I', '\u012c': 'I', '\u012e': 'I', '\u0130': 'I', + '\u0129': 'i', '\u012b': 'i', '\u012d': 'i', '\u012f': 'i', '\u0131': 'i', + '\u0134': 'J', '\u0135': 'j', + '\u0136': 'K', '\u0137': 'k', '\u0138': 'k', + '\u0139': 'L', '\u013b': 'L', '\u013d': 'L', '\u013f': 'L', '\u0141': 'L', + '\u013a': 'l', '\u013c': 'l', '\u013e': 'l', '\u0140': 'l', '\u0142': 'l', + '\u0143': 'N', '\u0145': 'N', '\u0147': 'N', '\u014a': 'N', + '\u0144': 'n', '\u0146': 'n', '\u0148': 'n', '\u014b': 'n', + '\u014c': 'O', '\u014e': 'O', '\u0150': 'O', + '\u014d': 'o', '\u014f': 'o', '\u0151': 'o', + '\u0154': 'R', '\u0156': 'R', '\u0158': 'R', + '\u0155': 'r', '\u0157': 'r', '\u0159': 'r', + '\u015a': 'S', '\u015c': 'S', '\u015e': 'S', '\u0160': 'S', + '\u015b': 's', '\u015d': 's', '\u015f': 's', '\u0161': 's', + '\u0162': 'T', '\u0164': 'T', '\u0166': 'T', + '\u0163': 't', '\u0165': 't', '\u0167': 't', + '\u0168': 'U', '\u016a': 'U', '\u016c': 'U', '\u016e': 'U', '\u0170': 'U', '\u0172': 'U', + '\u0169': 'u', '\u016b': 'u', '\u016d': 'u', '\u016f': 'u', '\u0171': 'u', '\u0173': 'u', + '\u0174': 'W', '\u0175': 'w', + '\u0176': 'Y', '\u0177': 'y', '\u0178': 'Y', + '\u0179': 'Z', '\u017b': 'Z', '\u017d': 'Z', + '\u017a': 'z', '\u017c': 'z', '\u017e': 'z', + '\u0132': 'IJ', '\u0133': 'ij', + '\u0152': 'Oe', '\u0153': 'oe', + '\u0149': "'n", '\u017f': 's' + }; -/** - * Map %O to `util.inspect()`, allowing multiple lines if needed. - */ + /** Used to map characters to HTML entities. */ + var htmlEscapes = { + '&': '&', + '<': '<', + '>': '>', + '"': '"', + "'": ''' + }; -formatters.O = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts); -}; + /** Used to map HTML entities to characters. */ + var htmlUnescapes = { + '&': '&', + '<': '<', + '>': '>', + '"': '"', + ''': "'" + }; + /** Used to escape characters for inclusion in compiled string literals. */ + var stringEscapes = { + '\\': '\\', + "'": "'", + '\n': 'n', + '\r': 'r', + '\u2028': 'u2028', + '\u2029': 'u2029' + }; -/***/ }), + /** Built-in method references without a dependency on `root`. */ + var freeParseFloat = parseFloat, + freeParseInt = parseInt; -/***/ 961: -/***/ ((module) => { + /** Detect free variable `global` from Node.js. */ + var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; -"use strict"; + /** Detect free variable `self`. */ + var freeSelf = typeof self == 'object' && self && self.Object === Object && self; -module.exports = (object, propertyName, fn) => { - const define = value => Object.defineProperty(object, propertyName, {value, enumerable: true, writable: true}); + /** Used as a reference to the global object. 
*/ + var root = freeGlobal || freeSelf || Function('return this')(); - Object.defineProperty(object, propertyName, { - configurable: true, - enumerable: true, - get() { - const result = fn(); - define(result); - return result; - }, - set(value) { - define(value); - } - }); + /** Detect free variable `exports`. */ + var freeExports = true && exports && !exports.nodeType && exports; - return object; -}; + /** Detect free variable `module`. */ + var freeModule = freeExports && "object" == 'object' && module && !module.nodeType && module; + /** Detect the popular CommonJS extension `module.exports`. */ + var moduleExports = freeModule && freeModule.exports === freeExports; -/***/ }), + /** Detect free variable `process` from Node.js. */ + var freeProcess = moduleExports && freeGlobal.process; -/***/ 18611: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** Used to access faster Node.js helpers. */ + var nodeUtil = (function() { + try { + // Use `util.types` for Node.js 10+. + var types = freeModule && freeModule.require && freeModule.require('util').types; -var Stream = (__nccwpck_require__(12781).Stream); -var util = __nccwpck_require__(73837); + if (types) { + return types; + } -module.exports = DelayedStream; -function DelayedStream() { - this.source = null; - this.dataSize = 0; - this.maxDataSize = 1024 * 1024; - this.pauseStream = true; + // Legacy `process.binding('util')` for Node.js < 10. + return freeProcess && freeProcess.binding && freeProcess.binding('util'); + } catch (e) {} + }()); - this._maxDataSizeExceeded = false; - this._released = false; - this._bufferedEvents = []; -} -util.inherits(DelayedStream, Stream); + /* Node.js helper references. */ + var nodeIsArrayBuffer = nodeUtil && nodeUtil.isArrayBuffer, + nodeIsDate = nodeUtil && nodeUtil.isDate, + nodeIsMap = nodeUtil && nodeUtil.isMap, + nodeIsRegExp = nodeUtil && nodeUtil.isRegExp, + nodeIsSet = nodeUtil && nodeUtil.isSet, + nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; -DelayedStream.create = function(source, options) { - var delayedStream = new this(); + /*--------------------------------------------------------------------------*/ - options = options || {}; - for (var option in options) { - delayedStream[option] = options[option]; + /** + * A faster alternative to `Function#apply`, this function invokes `func` + * with the `this` binding of `thisArg` and the arguments of `args`. + * + * @private + * @param {Function} func The function to invoke. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} args The arguments to invoke `func` with. + * @returns {*} Returns the result of `func`. + */ + function apply(func, thisArg, args) { + switch (args.length) { + case 0: return func.call(thisArg); + case 1: return func.call(thisArg, args[0]); + case 2: return func.call(thisArg, args[0], args[1]); + case 3: return func.call(thisArg, args[0], args[1], args[2]); + } + return func.apply(thisArg, args); } - delayedStream.source = source; - - var realEmit = source.emit; - source.emit = function() { - delayedStream._handleEmit(arguments); - return realEmit.apply(source, arguments); - }; + /** + * A specialized version of `baseAggregator` for arrays. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform keys. + * @param {Object} accumulator The initial aggregated object. + * @returns {Function} Returns `accumulator`. 
+ */ + function arrayAggregator(array, setter, iteratee, accumulator) { + var index = -1, + length = array == null ? 0 : array.length; - source.on('error', function() {}); - if (delayedStream.pauseStream) { - source.pause(); + while (++index < length) { + var value = array[index]; + setter(accumulator, value, iteratee(value), array); + } + return accumulator; } - return delayedStream; -}; + /** + * A specialized version of `_.forEach` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns `array`. + */ + function arrayEach(array, iteratee) { + var index = -1, + length = array == null ? 0 : array.length; -Object.defineProperty(DelayedStream.prototype, 'readable', { - configurable: true, - enumerable: true, - get: function() { - return this.source.readable; + while (++index < length) { + if (iteratee(array[index], index, array) === false) { + break; + } + } + return array; } -}); -DelayedStream.prototype.setEncoding = function() { - return this.source.setEncoding.apply(this.source, arguments); -}; + /** + * A specialized version of `_.forEachRight` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns `array`. + */ + function arrayEachRight(array, iteratee) { + var length = array == null ? 0 : array.length; -DelayedStream.prototype.resume = function() { - if (!this._released) { - this.release(); + while (length--) { + if (iteratee(array[length], length, array) === false) { + break; + } + } + return array; } - this.source.resume(); -}; - -DelayedStream.prototype.pause = function() { - this.source.pause(); -}; - -DelayedStream.prototype.release = function() { - this._released = true; - - this._bufferedEvents.forEach(function(args) { - this.emit.apply(this, args); - }.bind(this)); - this._bufferedEvents = []; -}; - -DelayedStream.prototype.pipe = function() { - var r = Stream.prototype.pipe.apply(this, arguments); - this.resume(); - return r; -}; - -DelayedStream.prototype._handleEmit = function(args) { - if (this._released) { - this.emit.apply(this, args); - return; - } + /** + * A specialized version of `_.every` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. + */ + function arrayEvery(array, predicate) { + var index = -1, + length = array == null ? 0 : array.length; - if (args[0] === 'data') { - this.dataSize += args[1].length; - this._checkIfMaxDataSizeExceeded(); + while (++index < length) { + if (!predicate(array[index], index, array)) { + return false; + } + } + return true; } - this._bufferedEvents.push(args); -}; + /** + * A specialized version of `_.filter` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + */ + function arrayFilter(array, predicate) { + var index = -1, + length = array == null ? 
0 : array.length, + resIndex = 0, + result = []; -DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { - if (this._maxDataSizeExceeded) { - return; + while (++index < length) { + var value = array[index]; + if (predicate(value, index, array)) { + result[resIndex++] = value; + } + } + return result; } - if (this.dataSize <= this.maxDataSize) { - return; + /** + * A specialized version of `_.includes` for arrays without support for + * specifying an index to search from. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ + function arrayIncludes(array, value) { + var length = array == null ? 0 : array.length; + return !!length && baseIndexOf(array, value, 0) > -1; } - this._maxDataSizeExceeded = true; - var message = - 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' - this.emit('error', new Error(message)); -}; - - -/***/ }), - -/***/ 58932: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ + /** + * This function is like `arrayIncludes` except that it accepts a comparator. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @param {Function} comparator The comparator invoked per element. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ + function arrayIncludesWith(array, value, comparator) { + var index = -1, + length = array == null ? 0 : array.length; - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); + while (++index < length) { + if (comparator(value, array[index])) { + return true; + } } - - this.name = 'Deprecation'; + return false; } -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 11728: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var Buffer = (__nccwpck_require__(21867).Buffer); - -var getParamBytesForAlg = __nccwpck_require__(30528); - -var MAX_OCTET = 0x80, - CLASS_UNIVERSAL = 0, - PRIMITIVE_BIT = 0x20, - TAG_SEQ = 0x10, - TAG_INT = 0x02, - ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), - ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); - -function base64Url(base64) { - return base64 - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); -} - -function signatureAsBuffer(signature) { - if (Buffer.isBuffer(signature)) { - return signature; - } else if ('string' === typeof signature) { - return Buffer.from(signature, 'base64'); - } - - throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); -} - -function derToJose(signature, alg) { - signature = signatureAsBuffer(signature); - var paramBytes = getParamBytesForAlg(alg); + /** + * A specialized version of `_.map` for arrays without support for iteratee + * shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ + function arrayMap(array, iteratee) { + var index = -1, + length = array == null ? 
0 : array.length, + result = Array(length); - // the DER encoded param should at most be the param size, plus a padding - // zero, since due to being a signed integer - var maxEncodedParamLength = paramBytes + 1; + while (++index < length) { + result[index] = iteratee(array[index], index, array); + } + return result; + } - var inputLength = signature.length; + /** + * Appends the elements of `values` to `array`. + * + * @private + * @param {Array} array The array to modify. + * @param {Array} values The values to append. + * @returns {Array} Returns `array`. + */ + function arrayPush(array, values) { + var index = -1, + length = values.length, + offset = array.length; - var offset = 0; - if (signature[offset++] !== ENCODED_TAG_SEQ) { - throw new Error('Could not find expected "seq"'); - } + while (++index < length) { + array[offset + index] = values[index]; + } + return array; + } - var seqLength = signature[offset++]; - if (seqLength === (MAX_OCTET | 1)) { - seqLength = signature[offset++]; - } + /** + * A specialized version of `_.reduce` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @param {boolean} [initAccum] Specify using the first element of `array` as + * the initial value. + * @returns {*} Returns the accumulated value. + */ + function arrayReduce(array, iteratee, accumulator, initAccum) { + var index = -1, + length = array == null ? 0 : array.length; - if (inputLength - offset < seqLength) { - throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); - } + if (initAccum && length) { + accumulator = array[++index]; + } + while (++index < length) { + accumulator = iteratee(accumulator, array[index], index, array); + } + return accumulator; + } - if (signature[offset++] !== ENCODED_TAG_INT) { - throw new Error('Could not find expected "int" for "r"'); - } + /** + * A specialized version of `_.reduceRight` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @param {boolean} [initAccum] Specify using the last element of `array` as + * the initial value. + * @returns {*} Returns the accumulated value. + */ + function arrayReduceRight(array, iteratee, accumulator, initAccum) { + var length = array == null ? 0 : array.length; + if (initAccum && length) { + accumulator = array[--length]; + } + while (length--) { + accumulator = iteratee(accumulator, array[length], length, array); + } + return accumulator; + } - var rLength = signature[offset++]; + /** + * A specialized version of `_.some` for arrays without support for iteratee + * shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + */ + function arraySome(array, predicate) { + var index = -1, + length = array == null ? 
0 : array.length; - if (inputLength - offset - 2 < rLength) { - throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); - } + while (++index < length) { + if (predicate(array[index], index, array)) { + return true; + } + } + return false; + } - if (maxEncodedParamLength < rLength) { - throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); - } + /** + * Gets the size of an ASCII `string`. + * + * @private + * @param {string} string The string inspect. + * @returns {number} Returns the string size. + */ + var asciiSize = baseProperty('length'); - var rOffset = offset; - offset += rLength; + /** + * Converts an ASCII `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ + function asciiToArray(string) { + return string.split(''); + } - if (signature[offset++] !== ENCODED_TAG_INT) { - throw new Error('Could not find expected "int" for "s"'); - } + /** + * Splits an ASCII `string` into an array of its words. + * + * @private + * @param {string} The string to inspect. + * @returns {Array} Returns the words of `string`. + */ + function asciiWords(string) { + return string.match(reAsciiWord) || []; + } - var sLength = signature[offset++]; + /** + * The base implementation of methods like `_.findKey` and `_.findLastKey`, + * without support for iteratee shorthands, which iterates over `collection` + * using `eachFunc`. + * + * @private + * @param {Array|Object} collection The collection to inspect. + * @param {Function} predicate The function invoked per iteration. + * @param {Function} eachFunc The function to iterate over `collection`. + * @returns {*} Returns the found element or its key, else `undefined`. + */ + function baseFindKey(collection, predicate, eachFunc) { + var result; + eachFunc(collection, function(value, key, collection) { + if (predicate(value, key, collection)) { + result = key; + return false; + } + }); + return result; + } - if (inputLength - offset !== sLength) { - throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); - } + /** + * The base implementation of `_.findIndex` and `_.findLastIndex` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} predicate The function invoked per iteration. + * @param {number} fromIndex The index to search from. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseFindIndex(array, predicate, fromIndex, fromRight) { + var length = array.length, + index = fromIndex + (fromRight ? 1 : -1); - if (maxEncodedParamLength < sLength) { - throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); - } + while ((fromRight ? index-- : ++index < length)) { + if (predicate(array[index], index, array)) { + return index; + } + } + return -1; + } - var sOffset = offset; - offset += sLength; + /** + * The base implementation of `_.indexOf` without `fromIndex` bounds checks. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. 
+ */ + function baseIndexOf(array, value, fromIndex) { + return value === value + ? strictIndexOf(array, value, fromIndex) + : baseFindIndex(array, baseIsNaN, fromIndex); + } - if (offset !== inputLength) { - throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); - } + /** + * This function is like `baseIndexOf` except that it accepts a comparator. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @param {Function} comparator The comparator invoked per element. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseIndexOfWith(array, value, fromIndex, comparator) { + var index = fromIndex - 1, + length = array.length; - var rPadding = paramBytes - rLength, - sPadding = paramBytes - sLength; + while (++index < length) { + if (comparator(array[index], value)) { + return index; + } + } + return -1; + } - var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); + /** + * The base implementation of `_.isNaN` without support for number objects. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + */ + function baseIsNaN(value) { + return value !== value; + } - for (offset = 0; offset < rPadding; ++offset) { - dst[offset] = 0; - } - signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); + /** + * The base implementation of `_.mean` and `_.meanBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the mean. + */ + function baseMean(array, iteratee) { + var length = array == null ? 0 : array.length; + return length ? (baseSum(array, iteratee) / length) : NAN; + } - offset = paramBytes; + /** + * The base implementation of `_.property` without support for deep paths. + * + * @private + * @param {string} key The key of the property to get. + * @returns {Function} Returns the new accessor function. + */ + function baseProperty(key) { + return function(object) { + return object == null ? undefined : object[key]; + }; + } - for (var o = offset; offset < o + sPadding; ++offset) { - dst[offset] = 0; - } - signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + /** + * The base implementation of `_.propertyOf` without support for deep paths. + * + * @private + * @param {Object} object The object to query. + * @returns {Function} Returns the new accessor function. + */ + function basePropertyOf(object) { + return function(key) { + return object == null ? undefined : object[key]; + }; + } - dst = dst.toString('base64'); - dst = base64Url(dst); + /** + * The base implementation of `_.reduce` and `_.reduceRight`, without support + * for iteratee shorthands, which iterates over `collection` using `eachFunc`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} accumulator The initial value. + * @param {boolean} initAccum Specify using the first or last element of + * `collection` as the initial value. + * @param {Function} eachFunc The function to iterate over `collection`. + * @returns {*} Returns the accumulated value. 
+ */ + function baseReduce(collection, iteratee, accumulator, initAccum, eachFunc) { + eachFunc(collection, function(value, index, collection) { + accumulator = initAccum + ? (initAccum = false, value) + : iteratee(accumulator, value, index, collection); + }); + return accumulator; + } - return dst; -} + /** + * The base implementation of `_.sortBy` which uses `comparer` to define the + * sort order of `array` and replaces criteria objects with their corresponding + * values. + * + * @private + * @param {Array} array The array to sort. + * @param {Function} comparer The function to define sort order. + * @returns {Array} Returns `array`. + */ + function baseSortBy(array, comparer) { + var length = array.length; -function countPadding(buf, start, stop) { - var padding = 0; - while (start + padding < stop && buf[start + padding] === 0) { - ++padding; - } + array.sort(comparer); + while (length--) { + array[length] = array[length].value; + } + return array; + } - var needsSign = buf[start + padding] >= MAX_OCTET; - if (needsSign) { - --padding; - } + /** + * The base implementation of `_.sum` and `_.sumBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the sum. + */ + function baseSum(array, iteratee) { + var result, + index = -1, + length = array.length; - return padding; -} + while (++index < length) { + var current = iteratee(array[index]); + if (current !== undefined) { + result = result === undefined ? current : (result + current); + } + } + return result; + } -function joseToDer(signature, alg) { - signature = signatureAsBuffer(signature); - var paramBytes = getParamBytesForAlg(alg); + /** + * The base implementation of `_.times` without support for iteratee shorthands + * or max array length checks. + * + * @private + * @param {number} n The number of times to invoke `iteratee`. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the array of results. + */ + function baseTimes(n, iteratee) { + var index = -1, + result = Array(n); - var signatureBytes = signature.length; - if (signatureBytes !== paramBytes * 2) { - throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); - } + while (++index < n) { + result[index] = iteratee(index); + } + return result; + } - var rPadding = countPadding(signature, 0, paramBytes); - var sPadding = countPadding(signature, paramBytes, signature.length); - var rLength = paramBytes - rPadding; - var sLength = paramBytes - sPadding; + /** + * The base implementation of `_.toPairs` and `_.toPairsIn` which creates an array + * of key-value pairs for `object` corresponding to the property names of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the key-value pairs. + */ + function baseToPairs(object, props) { + return arrayMap(props, function(key) { + return [key, object[key]]; + }); + } - var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; + /** + * The base implementation of `_.trim`. + * + * @private + * @param {string} string The string to trim. + * @returns {string} Returns the trimmed string. + */ + function baseTrim(string) { + return string + ? 
string.slice(0, trimmedEndIndex(string) + 1).replace(reTrimStart, '') + : string; + } - var shortLength = rsBytes < MAX_OCTET; + /** + * The base implementation of `_.unary` without support for storing metadata. + * + * @private + * @param {Function} func The function to cap arguments for. + * @returns {Function} Returns the new capped function. + */ + function baseUnary(func) { + return function(value) { + return func(value); + }; + } - var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); + /** + * The base implementation of `_.values` and `_.valuesIn` which creates an + * array of `object` property values corresponding to the property names + * of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the array of property values. + */ + function baseValues(object, props) { + return arrayMap(props, function(key) { + return object[key]; + }); + } - var offset = 0; - dst[offset++] = ENCODED_TAG_SEQ; - if (shortLength) { - // Bit 8 has value "0" - // bits 7-1 give the length. - dst[offset++] = rsBytes; - } else { - // Bit 8 of first octet has value "1" - // bits 7-1 give the number of additional length octets. - dst[offset++] = MAX_OCTET | 1; - // length, base 256 - dst[offset++] = rsBytes & 0xff; - } - dst[offset++] = ENCODED_TAG_INT; - dst[offset++] = rLength; - if (rPadding < 0) { - dst[offset++] = 0; - offset += signature.copy(dst, offset, 0, paramBytes); - } else { - offset += signature.copy(dst, offset, rPadding, paramBytes); - } - dst[offset++] = ENCODED_TAG_INT; - dst[offset++] = sLength; - if (sPadding < 0) { - dst[offset++] = 0; - signature.copy(dst, offset, paramBytes); - } else { - signature.copy(dst, offset, paramBytes + sPadding); - } + /** + * Checks if a `cache` value for `key` exists. + * + * @private + * @param {Object} cache The cache to query. + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function cacheHas(cache, key) { + return cache.has(key); + } - return dst; -} + /** + * Used by `_.trim` and `_.trimStart` to get the index of the first string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the first unmatched string symbol. + */ + function charsStartIndex(strSymbols, chrSymbols) { + var index = -1, + length = strSymbols.length; -module.exports = { - derToJose: derToJose, - joseToDer: joseToDer -}; + while (++index < length && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; + } + /** + * Used by `_.trim` and `_.trimEnd` to get the index of the last string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the last unmatched string symbol. + */ + function charsEndIndex(strSymbols, chrSymbols) { + var index = strSymbols.length; -/***/ }), + while (index-- && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; + } -/***/ 30528: -/***/ ((module) => { + /** + * Gets the number of `placeholder` occurrences in `array`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} placeholder The placeholder to search for. 
+ * @returns {number} Returns the placeholder count. + */ + function countHolders(array, placeholder) { + var length = array.length, + result = 0; -"use strict"; + while (length--) { + if (array[length] === placeholder) { + ++result; + } + } + return result; + } + /** + * Used by `_.deburr` to convert Latin-1 Supplement and Latin Extended-A + * letters to basic Latin letters. + * + * @private + * @param {string} letter The matched letter to deburr. + * @returns {string} Returns the deburred letter. + */ + var deburrLetter = basePropertyOf(deburredLetters); -function getParamSize(keySize) { - var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); - return result; -} + /** + * Used by `_.escape` to convert characters to HTML entities. + * + * @private + * @param {string} chr The matched character to escape. + * @returns {string} Returns the escaped character. + */ + var escapeHtmlChar = basePropertyOf(htmlEscapes); -var paramBytesForAlg = { - ES256: getParamSize(256), - ES384: getParamSize(384), - ES512: getParamSize(521) -}; + /** + * Used by `_.template` to escape characters for inclusion in compiled string literals. + * + * @private + * @param {string} chr The matched character to escape. + * @returns {string} Returns the escaped character. + */ + function escapeStringChar(chr) { + return '\\' + stringEscapes[chr]; + } -function getParamBytesForAlg(alg) { - var paramBytes = paramBytesForAlg[alg]; - if (paramBytes) { - return paramBytes; - } + /** + * Gets the value at `key` of `object`. + * + * @private + * @param {Object} [object] The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ + function getValue(object, key) { + return object == null ? undefined : object[key]; + } - throw new Error('Unknown algorithm "' + alg + '"'); -} + /** + * Checks if `string` contains Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a symbol is found, else `false`. + */ + function hasUnicode(string) { + return reHasUnicode.test(string); + } -module.exports = getParamBytesForAlg; + /** + * Checks if `string` contains a word composed of Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a word is found, else `false`. + */ + function hasUnicodeWord(string) { + return reHasUnicodeWord.test(string); + } + /** + * Converts `iterator` to an array. + * + * @private + * @param {Object} iterator The iterator to convert. + * @returns {Array} Returns the converted array. + */ + function iteratorToArray(iterator) { + var data, + result = []; -/***/ }), + while (!(data = iterator.next()).done) { + result.push(data.value); + } + return result; + } -/***/ 30969: -/***/ ((module) => { + /** + * Converts `map` to its key-value pairs. + * + * @private + * @param {Object} map The map to convert. + * @returns {Array} Returns the key-value pairs. + */ + function mapToArray(map) { + var index = -1, + result = Array(map.size); -"use strict"; + map.forEach(function(value, key) { + result[++index] = [key, value]; + }); + return result; + } + /** + * Creates a unary function that invokes `func` with its argument transformed. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. 
+ */ + function overArg(func, transform) { + return function(arg) { + return func(transform(arg)); + }; + } -module.exports = function (data, opts) { - if (!opts) opts = {}; - if (typeof opts === 'function') opts = { cmp: opts }; - var cycles = (typeof opts.cycles === 'boolean') ? opts.cycles : false; + /** + * Replaces all `placeholder` elements in `array` with an internal placeholder + * and returns an array of their indexes. + * + * @private + * @param {Array} array The array to modify. + * @param {*} placeholder The placeholder to replace. + * @returns {Array} Returns the new array of placeholder indexes. + */ + function replaceHolders(array, placeholder) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; - var cmp = opts.cmp && (function (f) { - return function (node) { - return function (a, b) { - var aobj = { key: a, value: node[a] }; - var bobj = { key: b, value: node[b] }; - return f(aobj, bobj); - }; - }; - })(opts.cmp); + while (++index < length) { + var value = array[index]; + if (value === placeholder || value === PLACEHOLDER) { + array[index] = PLACEHOLDER; + result[resIndex++] = index; + } + } + return result; + } - var seen = []; - return (function stringify (node) { - if (node && node.toJSON && typeof node.toJSON === 'function') { - node = node.toJSON(); - } + /** + * Converts `set` to an array of its values. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the values. + */ + function setToArray(set) { + var index = -1, + result = Array(set.size); - if (node === undefined) return; - if (typeof node == 'number') return isFinite(node) ? '' + node : 'null'; - if (typeof node !== 'object') return JSON.stringify(node); + set.forEach(function(value) { + result[++index] = value; + }); + return result; + } - var i, out; - if (Array.isArray(node)) { - out = '['; - for (i = 0; i < node.length; i++) { - if (i) out += ','; - out += stringify(node[i]) || 'null'; - } - return out + ']'; - } + /** + * Converts `set` to its value-value pairs. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the value-value pairs. + */ + function setToPairs(set) { + var index = -1, + result = Array(set.size); - if (node === null) return 'null'; + set.forEach(function(value) { + result[++index] = [value, value]; + }); + return result; + } - if (seen.indexOf(node) !== -1) { - if (cycles) return JSON.stringify('__cycle__'); - throw new TypeError('Converting circular structure to JSON'); - } + /** + * A specialized version of `_.indexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. 
+ */ + function strictIndexOf(array, value, fromIndex) { + var index = fromIndex - 1, + length = array.length; - var seenIndex = seen.push(node) - 1; - var keys = Object.keys(node).sort(cmp && cmp(node)); - out = ''; - for (i = 0; i < keys.length; i++) { - var key = keys[i]; - var value = stringify(node[key]); + while (++index < length) { + if (array[index] === value) { + return index; + } + } + return -1; + } - if (!value) continue; - if (out) out += ','; - out += JSON.stringify(key) + ':' + value; - } - seen.splice(seenIndex, 1); - return '{' + out + '}'; - })(data); -}; + /** + * A specialized version of `_.lastIndexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function strictLastIndexOf(array, value, fromIndex) { + var index = fromIndex + 1; + while (index--) { + if (array[index] === value) { + return index; + } + } + return index; + } + /** + * Gets the number of symbols in `string`. + * + * @private + * @param {string} string The string to inspect. + * @returns {number} Returns the string size. + */ + function stringSize(string) { + return hasUnicode(string) + ? unicodeSize(string) + : asciiSize(string); + } -/***/ }), + /** + * Converts `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ + function stringToArray(string) { + return hasUnicode(string) + ? unicodeToArray(string) + : asciiToArray(string); + } -/***/ 12603: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Used by `_.trim` and `_.trimEnd` to get the index of the last non-whitespace + * character of `string`. + * + * @private + * @param {string} string The string to inspect. + * @returns {number} Returns the index of the last non-whitespace character. + */ + function trimmedEndIndex(string) { + var index = string.length; -"use strict"; + while (index-- && reWhitespace.test(string.charAt(index))) {} + return index; + } + /** + * Used by `_.unescape` to convert HTML entities to characters. + * + * @private + * @param {string} chr The matched character to unescape. + * @returns {string} Returns the unescaped character. + */ + var unescapeHtmlChar = basePropertyOf(htmlUnescapes); -const validator = __nccwpck_require__(61739); -const XMLParser = __nccwpck_require__(42380); -const XMLBuilder = __nccwpck_require__(80660); + /** + * Gets the size of a Unicode `string`. + * + * @private + * @param {string} string The string inspect. + * @returns {number} Returns the string size. + */ + function unicodeSize(string) { + var result = reUnicode.lastIndex = 0; + while (reUnicode.test(string)) { + ++result; + } + return result; + } -module.exports = { - XMLParser: XMLParser, - XMLValidator: validator, - XMLBuilder: XMLBuilder -} + /** + * Converts a Unicode `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ + function unicodeToArray(string) { + return string.match(reUnicode) || []; + } -/***/ }), + /** + * Splits a Unicode `string` into an array of its words. + * + * @private + * @param {string} The string to inspect. + * @returns {Array} Returns the words of `string`. 
+ */ + function unicodeWords(string) { + return string.match(reUnicodeWord) || []; + } -/***/ 38280: -/***/ ((__unused_webpack_module, exports) => { + /*--------------------------------------------------------------------------*/ -"use strict"; + /** + * Create a new pristine `lodash` function using the `context` object. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Util + * @param {Object} [context=root] The context object. + * @returns {Function} Returns a new `lodash` function. + * @example + * + * _.mixin({ 'foo': _.constant('foo') }); + * + * var lodash = _.runInContext(); + * lodash.mixin({ 'bar': lodash.constant('bar') }); + * + * _.isFunction(_.foo); + * // => true + * _.isFunction(_.bar); + * // => false + * + * lodash.isFunction(lodash.foo); + * // => false + * lodash.isFunction(lodash.bar); + * // => true + * + * // Create a suped-up `defer` in Node.js. + * var defer = _.runInContext({ 'setTimeout': setImmediate }).defer; + */ + var runInContext = (function runInContext(context) { + context = context == null ? root : _.defaults(root.Object(), context, _.pick(root, contextProps)); + /** Built-in constructor references. */ + var Array = context.Array, + Date = context.Date, + Error = context.Error, + Function = context.Function, + Math = context.Math, + Object = context.Object, + RegExp = context.RegExp, + String = context.String, + TypeError = context.TypeError; -const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; -const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; -const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' -const regexName = new RegExp('^' + nameRegexp + '$'); + /** Used for built-in method references. */ + var arrayProto = Array.prototype, + funcProto = Function.prototype, + objectProto = Object.prototype; -const getAllMatches = function(string, regex) { - const matches = []; - let match = regex.exec(string); - while (match) { - const allmatches = []; - allmatches.startIndex = regex.lastIndex - match[0].length; - const len = match.length; - for (let index = 0; index < len; index++) { - allmatches.push(match[index]); - } - matches.push(allmatches); - match = regex.exec(string); - } - return matches; -}; + /** Used to detect overreaching core-js shims. */ + var coreJsData = context['__core-js_shared__']; -const isName = function(string) { - const match = regexName.exec(string); - return !(match === null || typeof match === 'undefined'); -}; + /** Used to resolve the decompiled source of functions. */ + var funcToString = funcProto.toString; -exports.isExist = function(v) { - return typeof v !== 'undefined'; -}; + /** Used to check objects for own properties. */ + var hasOwnProperty = objectProto.hasOwnProperty; -exports.isEmptyObject = function(obj) { - return Object.keys(obj).length === 0; -}; + /** Used to generate unique IDs. */ + var idCounter = 0; -/** - * Copy all the properties of a into b. 
- * @param {*} target - * @param {*} a - */ -exports.merge = function(target, a, arrayMode) { - if (a) { - const keys = Object.keys(a); // will return an array of own properties - const len = keys.length; //don't make it inline - for (let i = 0; i < len; i++) { - if (arrayMode === 'strict') { - target[keys[i]] = [ a[keys[i]] ]; - } else { - target[keys[i]] = a[keys[i]]; - } - } - } -}; -/* exports.merge =function (b,a){ - return Object.assign(b,a); -} */ + /** Used to detect methods masquerading as native. */ + var maskSrcKey = (function() { + var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); + return uid ? ('Symbol(src)_1.' + uid) : ''; + }()); -exports.getValue = function(v) { - if (exports.isExist(v)) { - return v; - } else { - return ''; - } -}; + /** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ + var nativeObjectToString = objectProto.toString; -// const fakeCall = function(a) {return a;}; -// const fakeCallNoReturn = function() {}; + /** Used to infer the `Object` constructor. */ + var objectCtorString = funcToString.call(Object); -exports.isName = isName; -exports.getAllMatches = getAllMatches; -exports.nameRegexp = nameRegexp; + /** Used to restore the original `_` reference in `_.noConflict`. */ + var oldDash = root._; + /** Used to detect if a method is native. */ + var reIsNative = RegExp('^' + + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') + .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' + ); -/***/ }), + /** Built-in value references. */ + var Buffer = moduleExports ? context.Buffer : undefined, + Symbol = context.Symbol, + Uint8Array = context.Uint8Array, + allocUnsafe = Buffer ? Buffer.allocUnsafe : undefined, + getPrototype = overArg(Object.getPrototypeOf, Object), + objectCreate = Object.create, + propertyIsEnumerable = objectProto.propertyIsEnumerable, + splice = arrayProto.splice, + spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined, + symIterator = Symbol ? Symbol.iterator : undefined, + symToStringTag = Symbol ? Symbol.toStringTag : undefined; -/***/ 61739: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var defineProperty = (function() { + try { + var func = getNative(Object, 'defineProperty'); + func({}, '', {}); + return func; + } catch (e) {} + }()); -"use strict"; + /** Mocked built-ins. */ + var ctxClearTimeout = context.clearTimeout !== root.clearTimeout && context.clearTimeout, + ctxNow = Date && Date.now !== root.Date.now && Date.now, + ctxSetTimeout = context.setTimeout !== root.setTimeout && context.setTimeout; + /* Built-in method references for those with the same name as other `lodash` methods. */ + var nativeCeil = Math.ceil, + nativeFloor = Math.floor, + nativeGetSymbols = Object.getOwnPropertySymbols, + nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined, + nativeIsFinite = context.isFinite, + nativeJoin = arrayProto.join, + nativeKeys = overArg(Object.keys, Object), + nativeMax = Math.max, + nativeMin = Math.min, + nativeNow = Date.now, + nativeParseInt = context.parseInt, + nativeRandom = Math.random, + nativeReverse = arrayProto.reverse; -const util = __nccwpck_require__(38280); + /* Built-in method references that are verified to be native. 
*/ + var DataView = getNative(context, 'DataView'), + Map = getNative(context, 'Map'), + Promise = getNative(context, 'Promise'), + Set = getNative(context, 'Set'), + WeakMap = getNative(context, 'WeakMap'), + nativeCreate = getNative(Object, 'create'); -const defaultOptions = { - allowBooleanAttributes: false, //A tag can have attributes without any value - unpairedTags: [] -}; + /** Used to store function metadata. */ + var metaMap = WeakMap && new WeakMap; -//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); -exports.validate = function (xmlData, options) { - options = Object.assign({}, defaultOptions, options); + /** Used to lookup unminified function names. */ + var realNames = {}; - //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line - //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag - //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE - const tags = []; - let tagFound = false; + /** Used to detect maps, sets, and weakmaps. */ + var dataViewCtorString = toSource(DataView), + mapCtorString = toSource(Map), + promiseCtorString = toSource(Promise), + setCtorString = toSource(Set), + weakMapCtorString = toSource(WeakMap); - //indicates that the root tag has been closed (aka. depth 0 has been reached) - let reachedRoot = false; + /** Used to convert symbols to primitives and strings. */ + var symbolProto = Symbol ? Symbol.prototype : undefined, + symbolValueOf = symbolProto ? symbolProto.valueOf : undefined, + symbolToString = symbolProto ? symbolProto.toString : undefined; - if (xmlData[0] === '\ufeff') { - // check for byte order mark (BOM) - xmlData = xmlData.substr(1); - } - - for (let i = 0; i < xmlData.length; i++) { + /*------------------------------------------------------------------------*/ - if (xmlData[i] === '<' && xmlData[i+1] === '?') { - i+=2; - i = readPI(xmlData,i); - if (i.err) return i; - }else if (xmlData[i] === '<') { - //starting of tag - //read until you reach to '>' avoiding any '>' in attribute value - let tagStartPos = i; - i++; - - if (xmlData[i] === '!') { - i = readCommentAndCDATA(xmlData, i); - continue; - } else { - let closingTag = false; - if (xmlData[i] === '/') { - //closing tag - closingTag = true; - i++; + /** + * Creates a `lodash` object which wraps `value` to enable implicit method + * chain sequences. Methods that operate on and return arrays, collections, + * and functions can be chained together. Methods that retrieve a single value + * or may return a primitive value will automatically end the chain sequence + * and return the unwrapped value. Otherwise, the value must be unwrapped + * with `_#value`. + * + * Explicit chain sequences, which must be unwrapped with `_#value`, may be + * enabled using `_.chain`. + * + * The execution of chained methods is lazy, that is, it's deferred until + * `_#value` is implicitly or explicitly called. + * + * Lazy evaluation allows several methods to support shortcut fusion. + * Shortcut fusion is an optimization to merge iteratee calls; this avoids + * the creation of intermediate arrays and can greatly reduce the number of + * iteratee executions. Sections of a chain sequence qualify for shortcut + * fusion if the section is applied to an array and iteratees accept only + * one argument. The heuristic for whether a section qualifies for shortcut + * fusion is subject to change. + * + * Chaining is supported in custom builds as long as the `_#value` method is + * directly or indirectly included in the build. 
+ * + * In addition to lodash methods, wrappers have `Array` and `String` methods. + * + * The wrapper `Array` methods are: + * `concat`, `join`, `pop`, `push`, `shift`, `sort`, `splice`, and `unshift` + * + * The wrapper `String` methods are: + * `replace` and `split` + * + * The wrapper methods that support shortcut fusion are: + * `at`, `compact`, `drop`, `dropRight`, `dropWhile`, `filter`, `find`, + * `findLast`, `head`, `initial`, `last`, `map`, `reject`, `reverse`, `slice`, + * `tail`, `take`, `takeRight`, `takeRightWhile`, `takeWhile`, and `toArray` + * + * The chainable wrapper methods are: + * `after`, `ary`, `assign`, `assignIn`, `assignInWith`, `assignWith`, `at`, + * `before`, `bind`, `bindAll`, `bindKey`, `castArray`, `chain`, `chunk`, + * `commit`, `compact`, `concat`, `conforms`, `constant`, `countBy`, `create`, + * `curry`, `debounce`, `defaults`, `defaultsDeep`, `defer`, `delay`, + * `difference`, `differenceBy`, `differenceWith`, `drop`, `dropRight`, + * `dropRightWhile`, `dropWhile`, `extend`, `extendWith`, `fill`, `filter`, + * `flatMap`, `flatMapDeep`, `flatMapDepth`, `flatten`, `flattenDeep`, + * `flattenDepth`, `flip`, `flow`, `flowRight`, `fromPairs`, `functions`, + * `functionsIn`, `groupBy`, `initial`, `intersection`, `intersectionBy`, + * `intersectionWith`, `invert`, `invertBy`, `invokeMap`, `iteratee`, `keyBy`, + * `keys`, `keysIn`, `map`, `mapKeys`, `mapValues`, `matches`, `matchesProperty`, + * `memoize`, `merge`, `mergeWith`, `method`, `methodOf`, `mixin`, `negate`, + * `nthArg`, `omit`, `omitBy`, `once`, `orderBy`, `over`, `overArgs`, + * `overEvery`, `overSome`, `partial`, `partialRight`, `partition`, `pick`, + * `pickBy`, `plant`, `property`, `propertyOf`, `pull`, `pullAll`, `pullAllBy`, + * `pullAllWith`, `pullAt`, `push`, `range`, `rangeRight`, `rearg`, `reject`, + * `remove`, `rest`, `reverse`, `sampleSize`, `set`, `setWith`, `shuffle`, + * `slice`, `sort`, `sortBy`, `splice`, `spread`, `tail`, `take`, `takeRight`, + * `takeRightWhile`, `takeWhile`, `tap`, `throttle`, `thru`, `toArray`, + * `toPairs`, `toPairsIn`, `toPath`, `toPlainObject`, `transform`, `unary`, + * `union`, `unionBy`, `unionWith`, `uniq`, `uniqBy`, `uniqWith`, `unset`, + * `unshift`, `unzip`, `unzipWith`, `update`, `updateWith`, `values`, + * `valuesIn`, `without`, `wrap`, `xor`, `xorBy`, `xorWith`, `zip`, + * `zipObject`, `zipObjectDeep`, and `zipWith` + * + * The wrapper methods that are **not** chainable by default are: + * `add`, `attempt`, `camelCase`, `capitalize`, `ceil`, `clamp`, `clone`, + * `cloneDeep`, `cloneDeepWith`, `cloneWith`, `conformsTo`, `deburr`, + * `defaultTo`, `divide`, `each`, `eachRight`, `endsWith`, `eq`, `escape`, + * `escapeRegExp`, `every`, `find`, `findIndex`, `findKey`, `findLast`, + * `findLastIndex`, `findLastKey`, `first`, `floor`, `forEach`, `forEachRight`, + * `forIn`, `forInRight`, `forOwn`, `forOwnRight`, `get`, `gt`, `gte`, `has`, + * `hasIn`, `head`, `identity`, `includes`, `indexOf`, `inRange`, `invoke`, + * `isArguments`, `isArray`, `isArrayBuffer`, `isArrayLike`, `isArrayLikeObject`, + * `isBoolean`, `isBuffer`, `isDate`, `isElement`, `isEmpty`, `isEqual`, + * `isEqualWith`, `isError`, `isFinite`, `isFunction`, `isInteger`, `isLength`, + * `isMap`, `isMatch`, `isMatchWith`, `isNaN`, `isNative`, `isNil`, `isNull`, + * `isNumber`, `isObject`, `isObjectLike`, `isPlainObject`, `isRegExp`, + * `isSafeInteger`, `isSet`, `isString`, `isUndefined`, `isTypedArray`, + * `isWeakMap`, `isWeakSet`, `join`, `kebabCase`, `last`, `lastIndexOf`, + * 
`lowerCase`, `lowerFirst`, `lt`, `lte`, `max`, `maxBy`, `mean`, `meanBy`, + * `min`, `minBy`, `multiply`, `noConflict`, `noop`, `now`, `nth`, `pad`, + * `padEnd`, `padStart`, `parseInt`, `pop`, `random`, `reduce`, `reduceRight`, + * `repeat`, `result`, `round`, `runInContext`, `sample`, `shift`, `size`, + * `snakeCase`, `some`, `sortedIndex`, `sortedIndexBy`, `sortedLastIndex`, + * `sortedLastIndexBy`, `startCase`, `startsWith`, `stubArray`, `stubFalse`, + * `stubObject`, `stubString`, `stubTrue`, `subtract`, `sum`, `sumBy`, + * `template`, `times`, `toFinite`, `toInteger`, `toJSON`, `toLength`, + * `toLower`, `toNumber`, `toSafeInteger`, `toString`, `toUpper`, `trim`, + * `trimEnd`, `trimStart`, `truncate`, `unescape`, `uniqueId`, `upperCase`, + * `upperFirst`, `value`, and `words` + * + * @name _ + * @constructor + * @category Seq + * @param {*} value The value to wrap in a `lodash` instance. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * function square(n) { + * return n * n; + * } + * + * var wrapped = _([1, 2, 3]); + * + * // Returns an unwrapped value. + * wrapped.reduce(_.add); + * // => 6 + * + * // Returns a wrapped value. + * var squares = wrapped.map(square); + * + * _.isArray(squares); + * // => false + * + * _.isArray(squares.value()); + * // => true + */ + function lodash(value) { + if (isObjectLike(value) && !isArray(value) && !(value instanceof LazyWrapper)) { + if (value instanceof LodashWrapper) { + return value; } - //read tagname - let tagName = ''; - for (; i < xmlData.length && - xmlData[i] !== '>' && - xmlData[i] !== ' ' && - xmlData[i] !== '\t' && - xmlData[i] !== '\n' && - xmlData[i] !== '\r'; i++ - ) { - tagName += xmlData[i]; + if (hasOwnProperty.call(value, '__wrapped__')) { + return wrapperClone(value); } - tagName = tagName.trim(); - //console.log(tagName); + } + return new LodashWrapper(value); + } - if (tagName[tagName.length - 1] === '/') { - //self closing tag without attributes - tagName = tagName.substring(0, tagName.length - 1); - //continue; - i--; + /** + * The base implementation of `_.create` without support for assigning + * properties to the created object. + * + * @private + * @param {Object} proto The object to inherit from. + * @returns {Object} Returns the new object. + */ + var baseCreate = (function() { + function object() {} + return function(proto) { + if (!isObject(proto)) { + return {}; } - if (!validateTagName(tagName)) { - let msg; - if (tagName.trim().length === 0) { - msg = "Invalid space after '<'."; - } else { - msg = "Tag '"+tagName+"' is an invalid name."; - } - return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + if (objectCreate) { + return objectCreate(proto); } + object.prototype = proto; + var result = new object; + object.prototype = undefined; + return result; + }; + }()); - const result = readAttributeStr(xmlData, i); - if (result === false) { - return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); - } - let attrStr = result.value; - i = result.index; + /** + * The function whose prototype chain sequence wrappers inherit from. + * + * @private + */ + function baseLodash() { + // No operation performed. 
+ } - if (attrStr[attrStr.length - 1] === '/') { - //self closing tag - const attrStrStart = i - attrStr.length; - attrStr = attrStr.substring(0, attrStr.length - 1); - const isValid = validateAttributeString(attrStr, options); - if (isValid === true) { - tagFound = true; - //continue; //text may presents after self closing tag - } else { - //the result from the nested function returns the position of the error within the attribute - //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute - //this gives us the absolute index in the entire xml, which we can use to find the line at last - return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); - } - } else if (closingTag) { - if (!result.tagClosed) { - return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); - } else if (attrStr.trim().length > 0) { - return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); - } else { - const otg = tags.pop(); - if (tagName !== otg.tagName) { - let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); - return getErrorObject('InvalidTag', - "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", - getLineNumberForPosition(xmlData, tagStartPos)); - } + /** + * The base constructor for creating `lodash` wrapper objects. + * + * @private + * @param {*} value The value to wrap. + * @param {boolean} [chainAll] Enable explicit method chain sequences. + */ + function LodashWrapper(value, chainAll) { + this.__wrapped__ = value; + this.__actions__ = []; + this.__chain__ = !!chainAll; + this.__index__ = 0; + this.__values__ = undefined; + } - //when there are no more tags, we reached the root level. - if (tags.length == 0) { - reachedRoot = true; - } - } - } else { - const isValid = validateAttributeString(attrStr, options); - if (isValid !== true) { - //the result from the nested function returns the position of the error within the attribute - //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute - //this gives us the absolute index in the entire xml, which we can use to find the line at last - return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); - } + /** + * By default, the template delimiters used by lodash are like those in + * embedded Ruby (ERB) as well as ES2015 template strings. Change the + * following template settings to use alternative delimiters. + * + * @static + * @memberOf _ + * @type {Object} + */ + lodash.templateSettings = { - //if the root level has been reached before ... - if (reachedRoot === true) { - return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); - } else if(options.unpairedTags.indexOf(tagName) !== -1){ - //don't push into stack - } else { - tags.push({tagName, tagStartPos}); - } - tagFound = true; - } + /** + * Used to detect `data` property values to be HTML-escaped. 
+ * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'escape': reEscape, - //skip tag text value - //It may include comments and CDATA value - for (i++; i < xmlData.length; i++) { - if (xmlData[i] === '<') { - if (xmlData[i + 1] === '!') { - //comment or CADATA - i++; - i = readCommentAndCDATA(xmlData, i); - continue; - } else if (xmlData[i+1] === '?') { - i = readPI(xmlData, ++i); - if (i.err) return i; - } else{ - break; - } - } else if (xmlData[i] === '&') { - const afterAmp = validateAmpersand(xmlData, i); - if (afterAmp == -1) - return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); - i = afterAmp; - }else{ - if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { - return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); - } - } - } //end of reading tag text value - if (xmlData[i] === '<') { - i--; - } - } - } else { - if ( isWhiteSpace(xmlData[i])) { - continue; - } - return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); - } - } + /** + * Used to detect code to be evaluated. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'evaluate': reEvaluate, - if (!tagFound) { - return getErrorObject('InvalidXml', 'Start tag expected.', 1); - }else if (tags.length == 1) { - return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); - }else if (tags.length > 0) { - return getErrorObject('InvalidXml', "Invalid '"+ - JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ - "' found.", {line: 1, col: 1}); - } + /** + * Used to detect `data` property values to inject. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'interpolate': reInterpolate, - return true; -}; + /** + * Used to reference the data object in the template text. + * + * @memberOf _.templateSettings + * @type {string} + */ + 'variable': '', -function isWhiteSpace(char){ - return char === ' ' || char === '\t' || char === '\n' || char === '\r'; -} -/** - * Read Processing insstructions and skip - * @param {*} xmlData - * @param {*} i - */ -function readPI(xmlData, i) { - const start = i; - for (; i < xmlData.length; i++) { - if (xmlData[i] == '?' || xmlData[i] == ' ') { - //tagname - const tagname = xmlData.substr(start, i - start); - if (i > 5 && tagname === 'xml') { - return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); - } else if (xmlData[i] == '?' && xmlData[i + 1] == '>') { - //check if valid attribut string - i++; - break; - } else { - continue; - } - } - } - return i; -} + /** + * Used to import variables into the compiled template. 
+ * + * @memberOf _.templateSettings + * @type {Object} + */ + 'imports': { -function readCommentAndCDATA(xmlData, i) { - if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { - //comment - for (i += 3; i < xmlData.length; i++) { - if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { - i += 2; - break; - } - } - } else if ( - xmlData.length > i + 8 && - xmlData[i + 1] === 'D' && - xmlData[i + 2] === 'O' && - xmlData[i + 3] === 'C' && - xmlData[i + 4] === 'T' && - xmlData[i + 5] === 'Y' && - xmlData[i + 6] === 'P' && - xmlData[i + 7] === 'E' - ) { - let angleBracketsCount = 1; - for (i += 8; i < xmlData.length; i++) { - if (xmlData[i] === '<') { - angleBracketsCount++; - } else if (xmlData[i] === '>') { - angleBracketsCount--; - if (angleBracketsCount === 0) { - break; - } - } - } - } else if ( - xmlData.length > i + 9 && - xmlData[i + 1] === '[' && - xmlData[i + 2] === 'C' && - xmlData[i + 3] === 'D' && - xmlData[i + 4] === 'A' && - xmlData[i + 5] === 'T' && - xmlData[i + 6] === 'A' && - xmlData[i + 7] === '[' - ) { - for (i += 8; i < xmlData.length; i++) { - if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { - i += 2; - break; + /** + * A reference to the `lodash` function. + * + * @memberOf _.templateSettings.imports + * @type {Function} + */ + '_': lodash } - } - } + }; - return i; -} + // Ensure wrappers are instances of `baseLodash`. + lodash.prototype = baseLodash.prototype; + lodash.prototype.constructor = lodash; -const doubleQuote = '"'; -const singleQuote = "'"; + LodashWrapper.prototype = baseCreate(baseLodash.prototype); + LodashWrapper.prototype.constructor = LodashWrapper; -/** - * Keep reading xmlData until '<' is found outside the attribute value. - * @param {string} xmlData - * @param {number} i - */ -function readAttributeStr(xmlData, i) { - let attrStr = ''; - let startChar = ''; - let tagClosed = false; - for (; i < xmlData.length; i++) { - if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { - if (startChar === '') { - startChar = xmlData[i]; - } else if (startChar !== xmlData[i]) { - //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + /*------------------------------------------------------------------------*/ + + /** + * Creates a lazy wrapper object which wraps `value` to enable lazy evaluation. + * + * @private + * @constructor + * @param {*} value The value to wrap. + */ + function LazyWrapper(value) { + this.__wrapped__ = value; + this.__actions__ = []; + this.__dir__ = 1; + this.__filtered__ = false; + this.__iteratees__ = []; + this.__takeCount__ = MAX_ARRAY_LENGTH; + this.__views__ = []; + } + + /** + * Creates a clone of the lazy wrapper object. + * + * @private + * @name clone + * @memberOf LazyWrapper + * @returns {Object} Returns the cloned `LazyWrapper` object. + */ + function lazyClone() { + var result = new LazyWrapper(this.__wrapped__); + result.__actions__ = copyArray(this.__actions__); + result.__dir__ = this.__dir__; + result.__filtered__ = this.__filtered__; + result.__iteratees__ = copyArray(this.__iteratees__); + result.__takeCount__ = this.__takeCount__; + result.__views__ = copyArray(this.__views__); + return result; + } + + /** + * Reverses the direction of lazy iteration. + * + * @private + * @name reverse + * @memberOf LazyWrapper + * @returns {Object} Returns the new reversed `LazyWrapper` object. 
+ */ + function lazyReverse() { + if (this.__filtered__) { + var result = new LazyWrapper(this); + result.__dir__ = -1; + result.__filtered__ = true; } else { - startChar = ''; - } - } else if (xmlData[i] === '>') { - if (startChar === '') { - tagClosed = true; - break; + result = this.clone(); + result.__dir__ *= -1; } + return result; } - attrStr += xmlData[i]; - } - if (startChar !== '') { - return false; - } - - return { - value: attrStr, - index: i, - tagClosed: tagClosed - }; -} -/** - * Select all the attributes whether valid or invalid. - */ -const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + /** + * Extracts the unwrapped value from its lazy wrapper. + * + * @private + * @name value + * @memberOf LazyWrapper + * @returns {*} Returns the unwrapped value. + */ + function lazyValue() { + var array = this.__wrapped__.value(), + dir = this.__dir__, + isArr = isArray(array), + isRight = dir < 0, + arrLength = isArr ? array.length : 0, + view = getView(0, arrLength, this.__views__), + start = view.start, + end = view.end, + length = end - start, + index = isRight ? end : (start - 1), + iteratees = this.__iteratees__, + iterLength = iteratees.length, + resIndex = 0, + takeCount = nativeMin(length, this.__takeCount__); -//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + if (!isArr || (!isRight && arrLength == length && takeCount == length)) { + return baseWrapperValue(array, this.__actions__); + } + var result = []; -function validateAttributeString(attrStr, options) { - //console.log("start:"+attrStr+":end"); + outer: + while (length-- && resIndex < takeCount) { + index += dir; - //if(attrStr.trim().length === 0) return true; //empty string + var iterIndex = -1, + value = array[index]; - const matches = util.getAllMatches(attrStr, validAttrStrRegxp); - const attrNames = {}; + while (++iterIndex < iterLength) { + var data = iteratees[iterIndex], + iteratee = data.iteratee, + type = data.type, + computed = iteratee(value); - for (let i = 0; i < matches.length; i++) { - if (matches[i][1].length === 0) { - //nospace before attribute name: a="sd"b="saf" - return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) - } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { - return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); - } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { - //independent attribute: ab - return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); - } - /* else if(matches[i][6] === undefined){//attribute without value: ab= - return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; - } */ - const attrName = matches[i][2]; - if (!validateAttrName(attrName)) { - return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); - } - if (!attrNames.hasOwnProperty(attrName)) { - //check for duplicate attribute. 
- attrNames[attrName] = 1; - } else { - return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + if (type == LAZY_MAP_FLAG) { + value = computed; + } else if (!computed) { + if (type == LAZY_FILTER_FLAG) { + continue outer; + } else { + break outer; + } + } + } + result[resIndex++] = value; + } + return result; } - } - - return true; -} -function validateNumberAmpersand(xmlData, i) { - let re = /\d/; - if (xmlData[i] === 'x') { - i++; - re = /[\da-fA-F]/; - } - for (; i < xmlData.length; i++) { - if (xmlData[i] === ';') - return i; - if (!xmlData[i].match(re)) - break; - } - return -1; -} + // Ensure `LazyWrapper` is an instance of `baseLodash`. + LazyWrapper.prototype = baseCreate(baseLodash.prototype); + LazyWrapper.prototype.constructor = LazyWrapper; -function validateAmpersand(xmlData, i) { - // https://www.w3.org/TR/xml/#dt-charref - i++; - if (xmlData[i] === ';') - return -1; - if (xmlData[i] === '#') { - i++; - return validateNumberAmpersand(xmlData, i); - } - let count = 0; - for (; i < xmlData.length; i++, count++) { - if (xmlData[i].match(/\w/) && count < 20) - continue; - if (xmlData[i] === ';') - break; - return -1; - } - return i; -} + /*------------------------------------------------------------------------*/ -function getErrorObject(code, message, lineNumber) { - return { - err: { - code: code, - msg: message, - line: lineNumber.line || lineNumber, - col: lineNumber.col, - }, - }; -} + /** + * Creates a hash object. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function Hash(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; -function validateAttrName(attrName) { - return util.isName(attrName); -} + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } + } -// const startsWithXML = /^xml/i; + /** + * Removes all key-value entries from the hash. + * + * @private + * @name clear + * @memberOf Hash + */ + function hashClear() { + this.__data__ = nativeCreate ? nativeCreate(null) : {}; + this.size = 0; + } -function validateTagName(tagname) { - return util.isName(tagname) /* && !tagname.match(startsWithXML) */; -} + /** + * Removes `key` and its value from the hash. + * + * @private + * @name delete + * @memberOf Hash + * @param {Object} hash The hash to modify. + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function hashDelete(key) { + var result = this.has(key) && delete this.__data__[key]; + this.size -= result ? 1 : 0; + return result; + } -//this function returns the line number for the character at the given index -function getLineNumberForPosition(xmlData, index) { - const lines = xmlData.substring(0, index).split(/\r?\n/); - return { - line: lines.length, + /** + * Gets the hash value for `key`. + * + * @private + * @name get + * @memberOf Hash + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function hashGet(key) { + var data = this.__data__; + if (nativeCreate) { + var result = data[key]; + return result === HASH_UNDEFINED ? undefined : result; + } + return hasOwnProperty.call(data, key) ? data[key] : undefined; + } - // column number is last line's length + 1, because column numbering starts at 1: - col: lines[lines.length - 1].length + 1 - }; -} + /** + * Checks if a hash value for `key` exists. 
+ * + * @private + * @name has + * @memberOf Hash + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function hashHas(key) { + var data = this.__data__; + return nativeCreate ? (data[key] !== undefined) : hasOwnProperty.call(data, key); + } -//this function returns the position of the first character of match within attrStr -function getPositionFromMatch(match) { - return match.startIndex + match[1].length; -} + /** + * Sets the hash `key` to `value`. + * + * @private + * @name set + * @memberOf Hash + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the hash instance. + */ + function hashSet(key, value) { + var data = this.__data__; + this.size += this.has(key) ? 0 : 1; + data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; + return this; + } + // Add methods to `Hash`. + Hash.prototype.clear = hashClear; + Hash.prototype['delete'] = hashDelete; + Hash.prototype.get = hashGet; + Hash.prototype.has = hashHas; + Hash.prototype.set = hashSet; -/***/ }), + /*------------------------------------------------------------------------*/ -/***/ 80660: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Creates an list cache object. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function ListCache(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; -"use strict"; + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } + } -//parse Empty Node as self closing node -const buildFromOrderedJs = __nccwpck_require__(72462); + /** + * Removes all key-value entries from the list cache. + * + * @private + * @name clear + * @memberOf ListCache + */ + function listCacheClear() { + this.__data__ = []; + this.size = 0; + } -const defaultOptions = { - attributeNamePrefix: '@_', - attributesGroupName: false, - textNodeName: '#text', - ignoreAttributes: true, - cdataPropName: false, - format: false, - indentBy: ' ', - suppressEmptyNode: false, - suppressUnpairedNode: true, - suppressBooleanAttributes: true, - tagValueProcessor: function(key, a) { - return a; - }, - attributeValueProcessor: function(attrName, a) { - return a; - }, - preserveOrder: false, - commentPropName: false, - unpairedTags: [], - entities: [ - { regex: new RegExp("&", "g"), val: "&" },//it must be on top - { regex: new RegExp(">", "g"), val: ">" }, - { regex: new RegExp("<", "g"), val: "<" }, - { regex: new RegExp("\'", "g"), val: "'" }, - { regex: new RegExp("\"", "g"), val: """ } - ], - processEntities: true, - stopNodes: [], - // transformTagName: false, - // transformAttributeName: false, - oneListGroup: false -}; + /** + * Removes `key` and its value from the list cache. + * + * @private + * @name delete + * @memberOf ListCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. 
+ */ + function listCacheDelete(key) { + var data = this.__data__, + index = assocIndexOf(data, key); -function Builder(options) { - this.options = Object.assign({}, defaultOptions, options); - if (this.options.ignoreAttributes || this.options.attributesGroupName) { - this.isAttribute = function(/*a*/) { - return false; - }; - } else { - this.attrPrefixLen = this.options.attributeNamePrefix.length; - this.isAttribute = isAttribute; - } + if (index < 0) { + return false; + } + var lastIndex = data.length - 1; + if (index == lastIndex) { + data.pop(); + } else { + splice.call(data, index, 1); + } + --this.size; + return true; + } - this.processTextOrObjNode = processTextOrObjNode + /** + * Gets the list cache value for `key`. + * + * @private + * @name get + * @memberOf ListCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function listCacheGet(key) { + var data = this.__data__, + index = assocIndexOf(data, key); - if (this.options.format) { - this.indentate = indentate; - this.tagEndChar = '>\n'; - this.newLine = '\n'; - } else { - this.indentate = function() { - return ''; - }; - this.tagEndChar = '>'; - this.newLine = ''; - } -} + return index < 0 ? undefined : data[index][1]; + } -Builder.prototype.build = function(jObj) { - if(this.options.preserveOrder){ - return buildFromOrderedJs(jObj, this.options); - }else { - if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ - jObj = { - [this.options.arrayNodeName] : jObj - } + /** + * Checks if a list cache value for `key` exists. + * + * @private + * @name has + * @memberOf ListCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function listCacheHas(key) { + return assocIndexOf(this.__data__, key) > -1; } - return this.j2x(jObj, 0).val; - } -}; -Builder.prototype.j2x = function(jObj, level) { - let attrStr = ''; - let val = ''; - for (let key in jObj) { - if (typeof jObj[key] === 'undefined') { - // supress undefined node - } else if (jObj[key] === null) { - if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; - else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; - // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; - } else if (jObj[key] instanceof Date) { - val += this.buildTextValNode(jObj[key], key, '', level); - } else if (typeof jObj[key] !== 'object') { - //premitive type - const attr = this.isAttribute(key); - if (attr) { - attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); - }else { - //tag value - if (key === this.options.textNodeName) { - let newval = this.options.tagValueProcessor(key, '' + jObj[key]); - val += this.replaceEntitiesValue(newval); - } else { - val += this.buildTextValNode(jObj[key], key, '', level); - } - } - } else if (Array.isArray(jObj[key])) { - //repeated nodes - const arrLen = jObj[key].length; - let listTagVal = ""; - for (let j = 0; j < arrLen; j++) { - const item = jObj[key][j]; - if (typeof item === 'undefined') { - // supress undefined node - } else if (item === null) { - if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; - else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; - // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; - } else if (typeof item === 'object') { - if(this.options.oneListGroup ){ - listTagVal += this.j2x(item, level + 1).val; - }else{ - listTagVal += this.processTextOrObjNode(item, key, level) - } - } else { - listTagVal += this.buildTextValNode(item, key, '', level); - } - } - if(this.options.oneListGroup){ - listTagVal = this.buildObjectNode(listTagVal, key, '', level); - } - val += listTagVal; - } else { - //nested node - if (this.options.attributesGroupName && key === this.options.attributesGroupName) { - const Ks = Object.keys(jObj[key]); - const L = Ks.length; - for (let j = 0; j < L; j++) { - attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); - } + /** + * Sets the list cache `key` to `value`. + * + * @private + * @name set + * @memberOf ListCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the list cache instance. + */ + function listCacheSet(key, value) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + ++this.size; + data.push([key, value]); } else { - val += this.processTextOrObjNode(jObj[key], key, level) + data[index][1] = value; } + return this; } - } - return {attrStr: attrStr, val: val}; -}; -Builder.prototype.buildAttrPairStr = function(attrName, val){ - val = this.options.attributeValueProcessor(attrName, '' + val); - val = this.replaceEntitiesValue(val); - if (this.options.suppressBooleanAttributes && val === "true") { - return ' ' + attrName; - } else return ' ' + attrName + '="' + val + '"'; -} + // Add methods to `ListCache`. + ListCache.prototype.clear = listCacheClear; + ListCache.prototype['delete'] = listCacheDelete; + ListCache.prototype.get = listCacheGet; + ListCache.prototype.has = listCacheHas; + ListCache.prototype.set = listCacheSet; -function processTextOrObjNode (object, key, level) { - const result = this.j2x(object, level + 1); - if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { - return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); - } else { - return this.buildObjectNode(result.val, key, result.attrStr, level); - } -} + /*------------------------------------------------------------------------*/ -Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { - if(val === ""){ - if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; - else { - return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; - } - }else{ + /** + * Creates a map cache object to store key-value pairs. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function MapCache(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; - let tagEndExp = '' + val + tagEndExp ); - } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { - return this.indentate(level) + `` + this.newLine; - }else { - return ( - this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + - val + - this.indentate(level) + tagEndExp ); + + /** + * Removes all key-value entries from the map. 
+ * + * @private + * @name clear + * @memberOf MapCache + */ + function mapCacheClear() { + this.size = 0; + this.__data__ = { + 'hash': new Hash, + 'map': new (Map || ListCache), + 'string': new Hash + }; } - } -} -Builder.prototype.closeTag = function(key){ - let closeTag = ""; - if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired - if(!this.options.suppressUnpairedNode) closeTag = "/" - }else if(this.options.suppressEmptyNode){ //empty - closeTag = "/"; - }else{ - closeTag = `>` + this.newLine; - }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { - return this.indentate(level) + `` + this.newLine; - }else if(key[0] === "?") {//PI tag - return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; - }else{ - let textValue = this.options.tagValueProcessor(key, val); - textValue = this.replaceEntitiesValue(textValue); - - if( textValue === ''){ - return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; - }else{ - return this.indentate(level) + '<' + key + attrStr + '>' + - textValue + - ' 0 && this.options.processEntities){ - for (let i=0; i { + /** + * Checks if `value` is in the array cache. + * + * @private + * @name has + * @memberOf SetCache + * @param {*} value The value to search for. + * @returns {number} Returns `true` if `value` is found, else `false`. + */ + function setCacheHas(value) { + return this.__data__.has(value); + } -const EOL = "\n"; + // Add methods to `SetCache`. + SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; + SetCache.prototype.has = setCacheHas; -/** - * - * @param {array} jArray - * @param {any} options - * @returns - */ -function toXml(jArray, options) { - let indentation = ""; - if (options.format && options.indentBy.length > 0) { - indentation = EOL; + /*------------------------------------------------------------------------*/ + + /** + * Creates a stack cache object to store key-value pairs. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function Stack(entries) { + var data = this.__data__ = new ListCache(entries); + this.size = data.size; } - return arrToStr(jArray, options, "", indentation); -} -function arrToStr(arr, options, jPath, indentation) { - let xmlStr = ""; - let isPreviousElementTag = false; + /** + * Removes all key-value entries from the stack. + * + * @private + * @name clear + * @memberOf Stack + */ + function stackClear() { + this.__data__ = new ListCache; + this.size = 0; + } - for (let i = 0; i < arr.length; i++) { - const tagObj = arr[i]; - const tagName = propName(tagObj); - let newJPath = ""; - if (jPath.length === 0) newJPath = tagName - else newJPath = `${jPath}.${tagName}`; + /** + * Removes `key` and its value from the stack. + * + * @private + * @name delete + * @memberOf Stack + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. 
+ */ + function stackDelete(key) { + var data = this.__data__, + result = data['delete'](key); - if (tagName === options.textNodeName) { - let tagText = tagObj[tagName]; - if (!isStopNode(newJPath, options)) { - tagText = options.tagValueProcessor(tagName, tagText); - tagText = replaceEntitiesValue(tagText, options); - } - if (isPreviousElementTag) { - xmlStr += indentation; - } - xmlStr += tagText; - isPreviousElementTag = false; - continue; - } else if (tagName === options.cdataPropName) { - if (isPreviousElementTag) { - xmlStr += indentation; - } - xmlStr += ``; - isPreviousElementTag = false; - continue; - } else if (tagName === options.commentPropName) { - xmlStr += indentation + ``; - isPreviousElementTag = true; - continue; - } else if (tagName[0] === "?") { - const attStr = attr_to_str(tagObj[":@"], options); - const tempInd = tagName === "?xml" ? "" : indentation; - let piTextNodeName = tagObj[tagName][0][options.textNodeName]; - piTextNodeName = piTextNodeName.length !== 0 ? " " + piTextNodeName : ""; //remove extra spacing - xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; - isPreviousElementTag = true; - continue; - } - let newIdentation = indentation; - if (newIdentation !== "") { - newIdentation += options.indentBy; - } - const attStr = attr_to_str(tagObj[":@"], options); - const tagStart = indentation + `<${tagName}${attStr}`; - const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); - if (options.unpairedTags.indexOf(tagName) !== -1) { - if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; - else xmlStr += tagStart + "/>"; - } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { - xmlStr += tagStart + "/>"; - } else if (tagValue && tagValue.endsWith(">")) { - xmlStr += tagStart + `>${tagValue}${indentation}`; - } else { - xmlStr += tagStart + ">"; - if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; - } - isPreviousElementTag = true; + this.size = data.size; + return result; } - return xmlStr; -} + /** + * Gets the stack value for `key`. + * + * @private + * @name get + * @memberOf Stack + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function stackGet(key) { + return this.__data__.get(key); + } -function propName(obj) { - const keys = Object.keys(obj); - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - if (key !== ":@") return key; + /** + * Checks if a stack value for `key` exists. + * + * @private + * @name has + * @memberOf Stack + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function stackHas(key) { + return this.__data__.has(key); } -} -function attr_to_str(attrMap, options) { - let attrStr = ""; - if (attrMap && !options.ignoreAttributes) { - for (let attr in attrMap) { - let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); - attrVal = replaceEntitiesValue(attrVal, options); - if (attrVal === true && options.suppressBooleanAttributes) { - attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; - } else { - attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; - } + /** + * Sets the stack `key` to `value`. + * + * @private + * @name set + * @memberOf Stack + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the stack cache instance. 
+ */ + function stackSet(key, value) { + var data = this.__data__; + if (data instanceof ListCache) { + var pairs = data.__data__; + if (!Map || (pairs.length < LARGE_ARRAY_SIZE - 1)) { + pairs.push([key, value]); + this.size = ++data.size; + return this; } + data = this.__data__ = new MapCache(pairs); + } + data.set(key, value); + this.size = data.size; + return this; } - return attrStr; -} -function isStopNode(jPath, options) { - jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); - let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); - for (let index in options.stopNodes) { - if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true; - } - return false; -} + // Add methods to `Stack`. + Stack.prototype.clear = stackClear; + Stack.prototype['delete'] = stackDelete; + Stack.prototype.get = stackGet; + Stack.prototype.has = stackHas; + Stack.prototype.set = stackSet; -function replaceEntitiesValue(textValue, options) { - if (textValue && textValue.length > 0 && options.processEntities) { - for (let i = 0; i < options.entities.length; i++) { - const entity = options.entities[i]; - textValue = textValue.replace(entity.regex, entity.val); + /*------------------------------------------------------------------------*/ + + /** + * Creates an array of the enumerable property names of the array-like `value`. + * + * @private + * @param {*} value The value to query. + * @param {boolean} inherited Specify returning inherited property names. + * @returns {Array} Returns the array of property names. + */ + function arrayLikeKeys(value, inherited) { + var isArr = isArray(value), + isArg = !isArr && isArguments(value), + isBuff = !isArr && !isArg && isBuffer(value), + isType = !isArr && !isArg && !isBuff && isTypedArray(value), + skipIndexes = isArr || isArg || isBuff || isType, + result = skipIndexes ? baseTimes(value.length, String) : [], + length = result.length; + + for (var key in value) { + if ((inherited || hasOwnProperty.call(value, key)) && + !(skipIndexes && ( + // Safari 9 has enumerable `arguments.length` in strict mode. + key == 'length' || + // Node.js 0.10 has enumerable non-index properties on buffers. + (isBuff && (key == 'offset' || key == 'parent')) || + // PhantomJS 2 has enumerable non-index properties on typed arrays. + (isType && (key == 'buffer' || key == 'byteLength' || key == 'byteOffset')) || + // Skip index properties. + isIndex(key, length) + ))) { + result.push(key); } + } + return result; } - return textValue; -} -module.exports = toXml; + /** + * A specialized version of `_.sample` for arrays. + * + * @private + * @param {Array} array The array to sample. + * @returns {*} Returns the random element. + */ + function arraySample(array) { + var length = array.length; + return length ? array[baseRandom(0, length - 1)] : undefined; + } -/***/ }), + /** + * A specialized version of `_.sampleSize` for arrays. + * + * @private + * @param {Array} array The array to sample. + * @param {number} n The number of elements to sample. + * @returns {Array} Returns the random elements. + */ + function arraySampleSize(array, n) { + return shuffleSelf(copyArray(array), baseClamp(n, 0, array.length)); + } -/***/ 6072: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * A specialized version of `_.shuffle` for arrays. + * + * @private + * @param {Array} array The array to shuffle. + * @returns {Array} Returns the new shuffled array. 
+ */ + function arrayShuffle(array) { + return shuffleSelf(copyArray(array)); + } -const util = __nccwpck_require__(38280); + /** + * This function is like `assignValue` except that it doesn't assign + * `undefined` values. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function assignMergeValue(object, key, value) { + if ((value !== undefined && !eq(object[key], value)) || + (value === undefined && !(key in object))) { + baseAssignValue(object, key, value); + } + } -//TODO: handle comments -function readDocType(xmlData, i){ - - const entities = {}; - if( xmlData[i + 3] === 'O' && - xmlData[i + 4] === 'C' && - xmlData[i + 5] === 'T' && - xmlData[i + 6] === 'Y' && - xmlData[i + 7] === 'P' && - xmlData[i + 8] === 'E') - { - i = i+9; - let angleBracketsCount = 1; - let hasBody = false, comment = false; - let exp = ""; - for(;i') { //Read tag content - if(comment){ - if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ - comment = false; - angleBracketsCount--; - } - }else{ - angleBracketsCount--; - } - if (angleBracketsCount === 0) { - break; - } - }else if( xmlData[i] === '['){ - hasBody = true; - }else{ - exp += xmlData[i]; - } - } - if(angleBracketsCount !== 0){ - throw new Error(`Unclosed DOCTYPE`); + /** + * Gets the index at which the `key` is found in `array` of key-value pairs. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} key The key to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function assocIndexOf(array, key) { + var length = array.length; + while (length--) { + if (eq(array[length][0], key)) { + return length; } - }else{ - throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return -1; } - return {entities, i}; -} - -function readEntityExp(xmlData,i){ - //External entities are not supported - // - //Parameter entities are not supported - // + /** + * Aggregates elements of `collection` on `accumulator` with keys transformed + * by `iteratee` and values set by `setter`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform keys. + * @param {Object} accumulator The initial aggregated object. + * @returns {Function} Returns `accumulator`. + */ + function baseAggregator(collection, setter, iteratee, accumulator) { + baseEach(collection, function(value, key, collection) { + setter(accumulator, value, iteratee(value), collection); + }); + return accumulator; + } - //Internal entities are supported - // - - //read EntityName - let entityName = ""; - for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { - // if(xmlData[i] === " ") continue; - // else - entityName += xmlData[i]; + /** + * The base implementation of `_.assign` without support for multiple sources + * or `customizer` functions. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @returns {Object} Returns `object`. 
+ */ + function baseAssign(object, source) { + return object && copyObject(source, keys(source), object); } - entityName = entityName.trim(); - if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); - //read Entity Value - const startChar = xmlData[i++]; - let val = "" - for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { - val += xmlData[i]; + /** + * The base implementation of `_.assignIn` without support for multiple sources + * or `customizer` functions. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @returns {Object} Returns `object`. + */ + function baseAssignIn(object, source) { + return object && copyObject(source, keysIn(source), object); } - return [entityName, val, i]; -} -function isComment(xmlData, i){ - if(xmlData[i+1] === '!' && - xmlData[i+2] === '-' && - xmlData[i+3] === '-') return true - return false -} -function isEntity(xmlData, i){ - if(xmlData[i+1] === '!' && - xmlData[i+2] === 'E' && - xmlData[i+3] === 'N' && - xmlData[i+4] === 'T' && - xmlData[i+5] === 'I' && - xmlData[i+6] === 'T' && - xmlData[i+7] === 'Y') return true - return false -} -function isElement(xmlData, i){ - if(xmlData[i+1] === '!' && - xmlData[i+2] === 'E' && - xmlData[i+3] === 'L' && - xmlData[i+4] === 'E' && - xmlData[i+5] === 'M' && - xmlData[i+6] === 'E' && - xmlData[i+7] === 'N' && - xmlData[i+8] === 'T') return true - return false -} + /** + * The base implementation of `assignValue` and `assignMergeValue` without + * value checks. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function baseAssignValue(object, key, value) { + if (key == '__proto__' && defineProperty) { + defineProperty(object, key, { + 'configurable': true, + 'enumerable': true, + 'value': value, + 'writable': true + }); + } else { + object[key] = value; + } + } -function isAttlist(xmlData, i){ - if(xmlData[i+1] === '!' && - xmlData[i+2] === 'A' && - xmlData[i+3] === 'T' && - xmlData[i+4] === 'T' && - xmlData[i+5] === 'L' && - xmlData[i+6] === 'I' && - xmlData[i+7] === 'S' && - xmlData[i+8] === 'T') return true - return false -} -function isNotation(xmlData, i){ - if(xmlData[i+1] === '!' && - xmlData[i+2] === 'N' && - xmlData[i+3] === 'O' && - xmlData[i+4] === 'T' && - xmlData[i+5] === 'A' && - xmlData[i+6] === 'T' && - xmlData[i+7] === 'I' && - xmlData[i+8] === 'O' && - xmlData[i+9] === 'N') return true - return false -} + /** + * The base implementation of `_.at` without support for individual paths. + * + * @private + * @param {Object} object The object to iterate over. + * @param {string[]} paths The property paths to pick. + * @returns {Array} Returns the picked elements. + */ + function baseAt(object, paths) { + var index = -1, + length = paths.length, + result = Array(length), + skip = object == null; -function validateEntityName(name){ - if (util.isName(name)) - return name; - else - throw new Error(`Invalid entity name ${name}`); -} + while (++index < length) { + result[index] = skip ? undefined : get(object, paths[index]); + } + return result; + } -module.exports = readDocType; + /** + * The base implementation of `_.clamp` which doesn't coerce arguments. + * + * @private + * @param {number} number The number to clamp. + * @param {number} [lower] The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the clamped number. 
+ */ + function baseClamp(number, lower, upper) { + if (number === number) { + if (upper !== undefined) { + number = number <= upper ? number : upper; + } + if (lower !== undefined) { + number = number >= lower ? number : lower; + } + } + return number; + } + /** + * The base implementation of `_.clone` and `_.cloneDeep` which tracks + * traversed objects. + * + * @private + * @param {*} value The value to clone. + * @param {boolean} bitmask The bitmask flags. + * 1 - Deep clone + * 2 - Flatten inherited properties + * 4 - Clone symbols + * @param {Function} [customizer] The function to customize cloning. + * @param {string} [key] The key of `value`. + * @param {Object} [object] The parent object of `value`. + * @param {Object} [stack] Tracks traversed objects and their clone counterparts. + * @returns {*} Returns the cloned value. + */ + function baseClone(value, bitmask, customizer, key, object, stack) { + var result, + isDeep = bitmask & CLONE_DEEP_FLAG, + isFlat = bitmask & CLONE_FLAT_FLAG, + isFull = bitmask & CLONE_SYMBOLS_FLAG; -/***/ }), + if (customizer) { + result = object ? customizer(value, key, object, stack) : customizer(value); + } + if (result !== undefined) { + return result; + } + if (!isObject(value)) { + return value; + } + var isArr = isArray(value); + if (isArr) { + result = initCloneArray(value); + if (!isDeep) { + return copyArray(value, result); + } + } else { + var tag = getTag(value), + isFunc = tag == funcTag || tag == genTag; -/***/ 86993: -/***/ ((__unused_webpack_module, exports) => { + if (isBuffer(value)) { + return cloneBuffer(value, isDeep); + } + if (tag == objectTag || tag == argsTag || (isFunc && !object)) { + result = (isFlat || isFunc) ? {} : initCloneObject(value); + if (!isDeep) { + return isFlat + ? copySymbolsIn(value, baseAssignIn(result, value)) + : copySymbols(value, baseAssign(result, value)); + } + } else { + if (!cloneableTags[tag]) { + return object ? value : {}; + } + result = initCloneByTag(value, tag, isDeep); + } + } + // Check for circular references and return its corresponding clone. 
+ stack || (stack = new Stack); + var stacked = stack.get(value); + if (stacked) { + return stacked; + } + stack.set(value, result); + if (isSet(value)) { + value.forEach(function(subValue) { + result.add(baseClone(subValue, bitmask, customizer, subValue, value, stack)); + }); + } else if (isMap(value)) { + value.forEach(function(subValue, key) { + result.set(key, baseClone(subValue, bitmask, customizer, key, value, stack)); + }); + } -const defaultOptions = { - preserveOrder: false, - attributeNamePrefix: '@_', - attributesGroupName: false, - textNodeName: '#text', - ignoreAttributes: true, - removeNSPrefix: false, // remove NS from tag name or attribute name if true - allowBooleanAttributes: false, //a tag can have attributes without any value - //ignoreRootElement : false, - parseTagValue: true, - parseAttributeValue: false, - trimValues: true, //Trim string values of tag and attributes - cdataPropName: false, - numberParseOptions: { - hex: true, - leadingZeros: true, - eNotation: true - }, - tagValueProcessor: function(tagName, val) { - return val; - }, - attributeValueProcessor: function(attrName, val) { - return val; - }, - stopNodes: [], //nested tags will not be parsed even for errors - alwaysCreateTextNode: false, - isArray: () => false, - commentPropName: false, - unpairedTags: [], - processEntities: true, - htmlEntities: false, - ignoreDeclaration: false, - ignorePiTags: false, - transformTagName: false, - transformAttributeName: false, - updateTag: function(tagName, jPath, attrs){ - return tagName - }, - // skipEmptyListItem: false -}; - -const buildOptions = function(options) { - return Object.assign({}, defaultOptions, options); -}; + var keysFunc = isFull + ? (isFlat ? getAllKeysIn : getAllKeys) + : (isFlat ? keysIn : keys); -exports.buildOptions = buildOptions; -exports.defaultOptions = defaultOptions; + var props = isArr ? undefined : keysFunc(value); + arrayEach(props || value, function(subValue, key) { + if (props) { + key = subValue; + subValue = value[key]; + } + // Recursively populate clone (susceptible to call stack limits). + assignValue(result, key, baseClone(subValue, bitmask, customizer, key, value, stack)); + }); + return result; + } -/***/ }), + /** + * The base implementation of `_.conforms` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property predicates to conform to. + * @returns {Function} Returns the new spec function. + */ + function baseConforms(source) { + var props = keys(source); + return function(object) { + return baseConformsTo(object, source, props); + }; + } -/***/ 25832: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * The base implementation of `_.conformsTo` which accepts `props` to check. + * + * @private + * @param {Object} object The object to inspect. + * @param {Object} source The object of property predicates to conform to. + * @returns {boolean} Returns `true` if `object` conforms, else `false`. + */ + function baseConformsTo(object, source, props) { + var length = props.length; + if (object == null) { + return !length; + } + object = Object(object); + while (length--) { + var key = props[length], + predicate = source[key], + value = object[key]; -"use strict"; + if ((value === undefined && !(key in object)) || !predicate(value)) { + return false; + } + } + return true; + } -///@ts-check + /** + * The base implementation of `_.delay` and `_.defer` which accepts `args` + * to provide to `func`. + * + * @private + * @param {Function} func The function to delay. 
+ * @param {number} wait The number of milliseconds to delay invocation. + * @param {Array} args The arguments to provide to `func`. + * @returns {number|Object} Returns the timer id or timeout object. + */ + function baseDelay(func, wait, args) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return setTimeout(function() { func.apply(undefined, args); }, wait); + } -const util = __nccwpck_require__(38280); -const xmlNode = __nccwpck_require__(7462); -const readDocType = __nccwpck_require__(6072); -const toNumber = __nccwpck_require__(14526); + /** + * The base implementation of methods like `_.difference` without support + * for excluding multiple arrays or iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Array} values The values to exclude. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + */ + function baseDifference(array, values, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + isCommon = true, + length = array.length, + result = [], + valuesLength = values.length; -const regx = - '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' - .replace(/NAME/g, util.nameRegexp); + if (!length) { + return result; + } + if (iteratee) { + values = arrayMap(values, baseUnary(iteratee)); + } + if (comparator) { + includes = arrayIncludesWith; + isCommon = false; + } + else if (values.length >= LARGE_ARRAY_SIZE) { + includes = cacheHas; + isCommon = false; + values = new SetCache(values); + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee == null ? value : iteratee(value); -//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); -//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + value = (comparator || value !== 0) ? 
value : 0; + if (isCommon && computed === computed) { + var valuesIndex = valuesLength; + while (valuesIndex--) { + if (values[valuesIndex] === computed) { + continue outer; + } + } + result.push(value); + } + else if (!includes(values, computed, comparator)) { + result.push(value); + } + } + return result; + } -class OrderedObjParser{ - constructor(options){ - this.options = options; - this.currentNode = null; - this.tagsNodeStack = []; - this.docTypeEntities = {}; - this.lastEntities = { - "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, - "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, - "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, - "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, - }; - this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; - this.htmlEntities = { - "space": { regex: /&(nbsp|#160);/g, val: " " }, - // "lt" : { regex: /&(lt|#60);/g, val: "<" }, - // "gt" : { regex: /&(gt|#62);/g, val: ">" }, - // "amp" : { regex: /&(amp|#38);/g, val: "&" }, - // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, - // "apos" : { regex: /&(apos|#39);/g, val: "'" }, - "cent" : { regex: /&(cent|#162);/g, val: "¢" }, - "pound" : { regex: /&(pound|#163);/g, val: "£" }, - "yen" : { regex: /&(yen|#165);/g, val: "¥" }, - "euro" : { regex: /&(euro|#8364);/g, val: "€" }, - "copyright" : { regex: /&(copy|#169);/g, val: "©" }, - "reg" : { regex: /&(reg|#174);/g, val: "®" }, - "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, - }; - this.addExternalEntities = addExternalEntities; - this.parseXml = parseXml; - this.parseTextData = parseTextData; - this.resolveNameSpace = resolveNameSpace; - this.buildAttributesMap = buildAttributesMap; - this.isItStopNode = isItStopNode; - this.replaceEntitiesValue = replaceEntitiesValue; - this.readStopNodeData = readStopNodeData; - this.saveTextToParentTag = saveTextToParentTag; - this.addChild = addChild; - } + /** + * The base implementation of `_.forEach` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + */ + var baseEach = createBaseEach(baseForOwn); -} + /** + * The base implementation of `_.forEachRight` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + */ + var baseEachRight = createBaseEach(baseForOwnRight, true); -function addExternalEntities(externalEntities){ - const entKeys = Object.keys(externalEntities); - for (let i = 0; i < entKeys.length; i++) { - const ent = entKeys[i]; - this.lastEntities[ent] = { - regex: new RegExp("&"+ent+";","g"), - val : externalEntities[ent] + /** + * The base implementation of `_.every` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. 
+ * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false` + */ + function baseEvery(collection, predicate) { + var result = true; + baseEach(collection, function(value, index, collection) { + result = !!predicate(value, index, collection); + return result; + }); + return result; } - } -} -/** - * @param {string} val - * @param {string} tagName - * @param {string} jPath - * @param {boolean} dontTrim - * @param {boolean} hasAttributes - * @param {boolean} isLeafNode - * @param {boolean} escapeEntities - */ -function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { - if (val !== undefined) { - if (this.options.trimValues && !dontTrim) { - val = val.trim(); - } - if(val.length > 0){ - if(!escapeEntities) val = this.replaceEntitiesValue(val); - - const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); - if(newval === null || newval === undefined){ - //don't parse - return val; - }else if(typeof newval !== typeof val || newval !== val){ - //overwrite - return newval; - }else if(this.options.trimValues){ - return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); - }else{ - const trimmedVal = val.trim(); - if(trimmedVal === val){ - return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); - }else{ - return val; + /** + * The base implementation of methods like `_.max` and `_.min` which accepts a + * `comparator` to determine the extremum value. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The iteratee invoked per iteration. + * @param {Function} comparator The comparator used to compare values. + * @returns {*} Returns the extremum value. + */ + function baseExtremum(array, iteratee, comparator) { + var index = -1, + length = array.length; + + while (++index < length) { + var value = array[index], + current = iteratee(value); + + if (current != null && (computed === undefined + ? (current === current && !isSymbol(current)) + : comparator(current, computed) + )) { + var computed = current, + result = value; } } + return result; } - } -} -function resolveNameSpace(tagname) { - if (this.options.removeNSPrefix) { - const tags = tagname.split(':'); - const prefix = tagname.charAt(0) === '/' ? '/' : ''; - if (tags[0] === 'xmlns') { - return ''; + /** + * The base implementation of `_.fill` without an iteratee call guard. + * + * @private + * @param {Array} array The array to fill. + * @param {*} value The value to fill `array` with. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns `array`. + */ + function baseFill(array, value, start, end) { + var length = array.length; + + start = toInteger(start); + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = (end === undefined || end > length) ? length : toInteger(end); + if (end < 0) { + end += length; + } + end = start > end ? 0 : toLength(end); + while (start < end) { + array[start++] = value; + } + return array; } - if (tags.length === 2) { - tagname = prefix + tags[1]; + + /** + * The base implementation of `_.filter` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {Array} Returns the new filtered array. 
+ */ + function baseFilter(collection, predicate) { + var result = []; + baseEach(collection, function(value, index, collection) { + if (predicate(value, index, collection)) { + result.push(value); + } + }); + return result; } - } - return tagname; -} -//TODO: change regex to capture NS -//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); -const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + /** + * The base implementation of `_.flatten` with support for restricting flattening. + * + * @private + * @param {Array} array The array to flatten. + * @param {number} depth The maximum recursion depth. + * @param {boolean} [predicate=isFlattenable] The function invoked per iteration. + * @param {boolean} [isStrict] Restrict to values that pass `predicate` checks. + * @param {Array} [result=[]] The initial result value. + * @returns {Array} Returns the new flattened array. + */ + function baseFlatten(array, depth, predicate, isStrict, result) { + var index = -1, + length = array.length; -function buildAttributesMap(attrStr, jPath, tagName) { - if (!this.options.ignoreAttributes && typeof attrStr === 'string') { - // attrStr = attrStr.replace(/\r?\n/g, ' '); - //attrStr = attrStr || attrStr.trim(); + predicate || (predicate = isFlattenable); + result || (result = []); - const matches = util.getAllMatches(attrStr, attrsRegx); - const len = matches.length; //don't make it inline - const attrs = {}; - for (let i = 0; i < len; i++) { - const attrName = this.resolveNameSpace(matches[i][1]); - let oldVal = matches[i][4]; - let aName = this.options.attributeNamePrefix + attrName; - if (attrName.length) { - if (this.options.transformAttributeName) { - aName = this.options.transformAttributeName(aName); - } - if(aName === "__proto__") aName = "#__proto__"; - if (oldVal !== undefined) { - if (this.options.trimValues) { - oldVal = oldVal.trim(); - } - oldVal = this.replaceEntitiesValue(oldVal); - const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); - if(newVal === null || newVal === undefined){ - //don't parse - attrs[aName] = oldVal; - }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ - //overwrite - attrs[aName] = newVal; - }else{ - //parse - attrs[aName] = parseValue( - oldVal, - this.options.parseAttributeValue, - this.options.numberParseOptions - ); + while (++index < length) { + var value = array[index]; + if (depth > 0 && predicate(value)) { + if (depth > 1) { + // Recursively flatten arrays (susceptible to call stack limits). 
+ baseFlatten(value, depth - 1, predicate, isStrict, result); + } else { + arrayPush(result, value); } - } else if (this.options.allowBooleanAttributes) { - attrs[aName] = true; + } else if (!isStrict) { + result[result.length] = value; } } + return result; } - if (!Object.keys(attrs).length) { - return; - } - if (this.options.attributesGroupName) { - const attrCollection = {}; - attrCollection[this.options.attributesGroupName] = attrs; - return attrCollection; - } - return attrs - } -} - -const parseXml = function(xmlData) { - xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line - const xmlObj = new xmlNode('!xml'); - let currentNode = xmlObj; - let textData = ""; - let jPath = ""; - for(let i=0; i< xmlData.length; i++){//for each char in XML data - const ch = xmlData[i]; - if(ch === '<'){ - // const nextIndex = i+1; - // const _2ndChar = xmlData[nextIndex]; - if( xmlData[i+1] === '/') {//Closing Tag - const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") - let tagName = xmlData.substring(i+2,closeIndex).trim(); - - if(this.options.removeNSPrefix){ - const colonIndex = tagName.indexOf(":"); - if(colonIndex !== -1){ - tagName = tagName.substr(colonIndex+1); - } - } - if(this.options.transformTagName) { - tagName = this.options.transformTagName(tagName); - } + /** + * The base implementation of `baseForOwn` which iterates over `object` + * properties returned by `keysFunc` and invokes `iteratee` for each property. + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {Function} keysFunc The function to get the keys of `object`. + * @returns {Object} Returns `object`. + */ + var baseFor = createBaseFor(); - if(currentNode){ - textData = this.saveTextToParentTag(textData, currentNode, jPath); - } + /** + * This function is like `baseFor` except that it iterates over properties + * in the opposite order. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {Function} keysFunc The function to get the keys of `object`. + * @returns {Object} Returns `object`. + */ + var baseForRight = createBaseFor(true); - //check if last tag of nested tag was unpaired tag - const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); - if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ - throw new Error(`Unpaired tag can not be used as closing tag: `); - } - let propIndex = 0 - if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ - propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) - this.tagsNodeStack.pop(); - }else{ - propIndex = jPath.lastIndexOf("."); - } - jPath = jPath.substring(0, propIndex); + /** + * The base implementation of `_.forOwn` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Object} Returns `object`. + */ + function baseForOwn(object, iteratee) { + return object && baseFor(object, iteratee, keys); + } - currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope - textData = ""; - i = closeIndex; - } else if( xmlData[i+1] === '?') { + /** + * The base implementation of `_.forOwnRight` without support for iteratee shorthands. 
+ * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Object} Returns `object`. + */ + function baseForOwnRight(object, iteratee) { + return object && baseForRight(object, iteratee, keys); + } - let tagData = readTagExp(xmlData,i, false, "?>"); - if(!tagData) throw new Error("Pi Tag is not closed."); + /** + * The base implementation of `_.functions` which creates an array of + * `object` function property names filtered from `props`. + * + * @private + * @param {Object} object The object to inspect. + * @param {Array} props The property names to filter. + * @returns {Array} Returns the function names. + */ + function baseFunctions(object, props) { + return arrayFilter(props, function(key) { + return isFunction(object[key]); + }); + } - textData = this.saveTextToParentTag(textData, currentNode, jPath); - if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + /** + * The base implementation of `_.get` without support for default values. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to get. + * @returns {*} Returns the resolved value. + */ + function baseGet(object, path) { + path = castPath(path, object); - }else{ - - const childNode = new xmlNode(tagData.tagName); - childNode.add(this.options.textNodeName, ""); - - if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ - childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); - } - this.addChild(currentNode, childNode, jPath) + var index = 0, + length = path.length; - } + while (object != null && index < length) { + object = object[toKey(path[index++])]; + } + return (index && index == length) ? object : undefined; + } + /** + * The base implementation of `getAllKeys` and `getAllKeysIn` which uses + * `keysFunc` and `symbolsFunc` to get the enumerable property names and + * symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Function} keysFunc The function to get the keys of `object`. + * @param {Function} symbolsFunc The function to get the symbols of `object`. + * @returns {Array} Returns the array of property names and symbols. + */ + function baseGetAllKeys(object, keysFunc, symbolsFunc) { + var result = keysFunc(object); + return isArray(object) ? result : arrayPush(result, symbolsFunc(object)); + } - i = tagData.closeIndex + 1; - } else if(xmlData.substr(i + 1, 3) === '!--') { - const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") - if(this.options.commentPropName){ - const comment = xmlData.substring(i + 4, endIndex - 2); + /** + * The base implementation of `getTag` without fallbacks for buggy environments. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ + function baseGetTag(value) { + if (value == null) { + return value === undefined ? undefinedTag : nullTag; + } + return (symToStringTag && symToStringTag in Object(value)) + ? getRawTag(value) + : objectToString(value); + } - textData = this.saveTextToParentTag(textData, currentNode, jPath); + /** + * The base implementation of `_.gt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. 
+ */ + function baseGt(value, other) { + return value > other; + } - currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); - } - i = endIndex; - } else if( xmlData.substr(i + 1, 2) === '!D') { - const result = readDocType(xmlData, i); - this.docTypeEntities = result.entities; - i = result.i; - }else if(xmlData.substr(i + 1, 2) === '![') { - const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; - const tagExp = xmlData.substring(i + 9,closeIndex); + /** + * The base implementation of `_.has` without support for deep paths. + * + * @private + * @param {Object} [object] The object to query. + * @param {Array|string} key The key to check. + * @returns {boolean} Returns `true` if `key` exists, else `false`. + */ + function baseHas(object, key) { + return object != null && hasOwnProperty.call(object, key); + } - textData = this.saveTextToParentTag(textData, currentNode, jPath); + /** + * The base implementation of `_.hasIn` without support for deep paths. + * + * @private + * @param {Object} [object] The object to query. + * @param {Array|string} key The key to check. + * @returns {boolean} Returns `true` if `key` exists, else `false`. + */ + function baseHasIn(object, key) { + return object != null && key in Object(object); + } - //cdata should be set even if it is 0 length string - if(this.options.cdataPropName){ - // let val = this.parseTextData(tagExp, this.options.cdataPropName, jPath + "." + this.options.cdataPropName, true, false, true); - // if(!val) val = ""; - currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); - }else{ - let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true); - if(val == undefined) val = ""; - currentNode.add(this.options.textNodeName, val); - } - - i = closeIndex + 2; - }else {//Opening tag - let result = readTagExp(xmlData,i, this.options.removeNSPrefix); - let tagName= result.tagName; - let tagExp = result.tagExp; - let attrExpPresent = result.attrExpPresent; - let closeIndex = result.closeIndex; + /** + * The base implementation of `_.inRange` which doesn't coerce arguments. + * + * @private + * @param {number} number The number to check. + * @param {number} start The start of the range. + * @param {number} end The end of the range. + * @returns {boolean} Returns `true` if `number` is in the range, else `false`. + */ + function baseInRange(number, start, end) { + return number >= nativeMin(start, end) && number < nativeMax(start, end); + } - if (this.options.transformTagName) { - tagName = this.options.transformTagName(tagName); - } - - //save text as child node - if (currentNode && textData) { - if(currentNode.tagname !== '!xml'){ - //when nested tag is found - textData = this.saveTextToParentTag(textData, currentNode, jPath, false); - } - } + /** + * The base implementation of methods like `_.intersection`, without support + * for iteratee shorthands, that accepts an array of arrays to inspect. + * + * @private + * @param {Array} arrays The arrays to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of shared values. + */ + function baseIntersection(arrays, iteratee, comparator) { + var includes = comparator ? 
arrayIncludesWith : arrayIncludes, + length = arrays[0].length, + othLength = arrays.length, + othIndex = othLength, + caches = Array(othLength), + maxLength = Infinity, + result = []; - //check if last tag was unpaired tag - const lastTag = currentNode; - if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ - currentNode = this.tagsNodeStack.pop(); - jPath = jPath.substring(0, jPath.lastIndexOf(".")); - } - if(tagName !== xmlObj.tagname){ - jPath += jPath ? "." + tagName : tagName; + while (othIndex--) { + var array = arrays[othIndex]; + if (othIndex && iteratee) { + array = arrayMap(array, baseUnary(iteratee)); } - if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { //TODO: namespace - let tagContent = ""; - //self-closing tag - if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ - i = result.closeIndex; - } - //unpaired tag - else if(this.options.unpairedTags.indexOf(tagName) !== -1){ - i = result.closeIndex; - } - //normal tag - else{ - //read until closing tag is found - const result = this.readStopNodeData(xmlData, tagName, closeIndex + 1); - if(!result) throw new Error(`Unexpected end of ${tagName}`); - i = result.i; - tagContent = result.tagContent; - } + maxLength = nativeMin(array.length, maxLength); + caches[othIndex] = !comparator && (iteratee || (length >= 120 && array.length >= 120)) + ? new SetCache(othIndex && array) + : undefined; + } + array = arrays[0]; - const childNode = new xmlNode(tagName); - if(tagName !== tagExp && attrExpPresent){ - childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); - } - if(tagContent) { - tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); - } - - jPath = jPath.substr(0, jPath.lastIndexOf(".")); - childNode.add(this.options.textNodeName, tagContent); - - this.addChild(currentNode, childNode, jPath) - }else{ - //selfClosing tag - if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ - if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' - tagName = tagName.substr(0, tagName.length - 1); - tagExp = tagName; - }else{ - tagExp = tagExp.substr(0, tagExp.length - 1); - } - - if(this.options.transformTagName) { - tagName = this.options.transformTagName(tagName); - } + var index = -1, + seen = caches[0]; - const childNode = new xmlNode(tagName); - if(tagName !== tagExp && attrExpPresent){ - childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + outer: + while (++index < length && result.length < maxLength) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? value : 0; + if (!(seen + ? cacheHas(seen, computed) + : includes(result, computed, comparator) + )) { + othIndex = othLength; + while (--othIndex) { + var cache = caches[othIndex]; + if (!(cache + ? 
cacheHas(cache, computed) + : includes(arrays[othIndex], computed, comparator)) + ) { + continue outer; } - this.addChild(currentNode, childNode, jPath) - jPath = jPath.substr(0, jPath.lastIndexOf(".")); } - //opening tag - else{ - const childNode = new xmlNode( tagName); - this.tagsNodeStack.push(currentNode); - - if(tagName !== tagExp && attrExpPresent){ - childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); - } - this.addChild(currentNode, childNode, jPath) - currentNode = childNode; + if (seen) { + seen.push(computed); } - textData = ""; - i = closeIndex; + result.push(value); } } - }else{ - textData += xmlData[i]; + return result; } - } - return xmlObj.child; -} -function addChild(currentNode, childNode, jPath){ - const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) - if(result === false){ - }else if(typeof result === "string"){ - childNode.tagname = result - currentNode.addChild(childNode); - }else{ - currentNode.addChild(childNode); - } -} + /** + * The base implementation of `_.invert` and `_.invertBy` which inverts + * `object` with values transformed by `iteratee` and set by `setter`. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform values. + * @param {Object} accumulator The initial inverted object. + * @returns {Function} Returns `accumulator`. + */ + function baseInverter(object, setter, iteratee, accumulator) { + baseForOwn(object, function(value, key, object) { + setter(accumulator, iteratee(value), key, object); + }); + return accumulator; + } -const replaceEntitiesValue = function(val){ + /** + * The base implementation of `_.invoke` without support for individual + * method arguments. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path of the method to invoke. + * @param {Array} args The arguments to invoke the method with. + * @returns {*} Returns the result of the invoked method. + */ + function baseInvoke(object, path, args) { + path = castPath(path, object); + object = parent(object, path); + var func = object == null ? object : object[toKey(last(path))]; + return func == null ? undefined : apply(func, object, args); + } - if(this.options.processEntities){ - for(let entityName in this.docTypeEntities){ - const entity = this.docTypeEntities[entityName]; - val = val.replace( entity.regx, entity.val); + /** + * The base implementation of `_.isArguments`. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + */ + function baseIsArguments(value) { + return isObjectLike(value) && baseGetTag(value) == argsTag; } - for(let entityName in this.lastEntities){ - const entity = this.lastEntities[entityName]; - val = val.replace( entity.regex, entity.val); + + /** + * The base implementation of `_.isArrayBuffer` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array buffer, else `false`. + */ + function baseIsArrayBuffer(value) { + return isObjectLike(value) && baseGetTag(value) == arrayBufferTag; } - if(this.options.htmlEntities){ - for(let entityName in this.htmlEntities){ - const entity = this.htmlEntities[entityName]; - val = val.replace( entity.regex, entity.val); + + /** + * The base implementation of `_.isDate` without Node.js optimizations. 
+ * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. + */ + function baseIsDate(value) { + return isObjectLike(value) && baseGetTag(value) == dateTag; + } + + /** + * The base implementation of `_.isEqual` which supports partial comparisons + * and tracks traversed objects. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @param {boolean} bitmask The bitmask flags. + * 1 - Unordered comparison + * 2 - Partial comparison + * @param {Function} [customizer] The function to customize comparisons. + * @param {Object} [stack] Tracks traversed `value` and `other` objects. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + */ + function baseIsEqual(value, other, bitmask, customizer, stack) { + if (value === other) { + return true; + } + if (value == null || other == null || (!isObjectLike(value) && !isObjectLike(other))) { + return value !== value && other !== other; } + return baseIsEqualDeep(value, other, bitmask, customizer, baseIsEqual, stack); } - val = val.replace( this.ampEntity.regex, this.ampEntity.val); - } - return val; -} -function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { - if (textData) { //store previously collected data as textNode - if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 - - textData = this.parseTextData(textData, - currentNode.tagname, - jPath, - false, - currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, - isLeafNode); - if (textData !== undefined && textData !== "") - currentNode.add(this.options.textNodeName, textData); - textData = ""; - } - return textData; -} + /** + * A specialized version of `baseIsEqual` for arrays and objects which performs + * deep comparisons and tracks traversed objects enabling objects with circular + * references to be compared. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} [stack] Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function baseIsEqualDeep(object, other, bitmask, customizer, equalFunc, stack) { + var objIsArr = isArray(object), + othIsArr = isArray(other), + objTag = objIsArr ? arrayTag : getTag(object), + othTag = othIsArr ? arrayTag : getTag(other); -//TODO: use jPath to simplify the logic -/** - * - * @param {string[]} stopNodes - * @param {string} jPath - * @param {string} currentTagName - */ -function isItStopNode(stopNodes, jPath, currentTagName){ - const allNodesExp = "*." + currentTagName; - for (const stopNodePath in stopNodes) { - const stopNodeExp = stopNodes[stopNodePath]; - if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; - } - return false; -} + objTag = objTag == argsTag ? objectTag : objTag; + othTag = othTag == argsTag ? 
objectTag : othTag; -/** - * Returns the tag Expression and where it is ending handling single-double quotes situation - * @param {string} xmlData - * @param {number} i starting index - * @returns - */ -function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ - let attrBoundary; - let tagExp = ""; - for (let index = i; index < xmlData.length; index++) { - let ch = xmlData[index]; - if (attrBoundary) { - if (ch === attrBoundary) attrBoundary = "";//reset - } else if (ch === '"' || ch === "'") { - attrBoundary = ch; - } else if (ch === closingChar[0]) { - if(closingChar[1]){ - if(xmlData[index + 1] === closingChar[1]){ - return { - data: tagExp, - index: index - } - } - }else{ - return { - data: tagExp, - index: index + var objIsObj = objTag == objectTag, + othIsObj = othTag == objectTag, + isSameTag = objTag == othTag; + + if (isSameTag && isBuffer(object)) { + if (!isBuffer(other)) { + return false; } + objIsArr = true; + objIsObj = false; } - } else if (ch === '\t') { - ch = " " - } - tagExp += ch; - } -} + if (isSameTag && !objIsObj) { + stack || (stack = new Stack); + return (objIsArr || isTypedArray(object)) + ? equalArrays(object, other, bitmask, customizer, equalFunc, stack) + : equalByTag(object, other, objTag, bitmask, customizer, equalFunc, stack); + } + if (!(bitmask & COMPARE_PARTIAL_FLAG)) { + var objIsWrapped = objIsObj && hasOwnProperty.call(object, '__wrapped__'), + othIsWrapped = othIsObj && hasOwnProperty.call(other, '__wrapped__'); -function findClosingIndex(xmlData, str, i, errMsg){ - const closingIndex = xmlData.indexOf(str, i); - if(closingIndex === -1){ - throw new Error(errMsg) - }else{ - return closingIndex + str.length - 1; - } -} + if (objIsWrapped || othIsWrapped) { + var objUnwrapped = objIsWrapped ? object.value() : object, + othUnwrapped = othIsWrapped ? other.value() : other; -function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ - const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); - if(!result) return; - let tagExp = result.data; - const closeIndex = result.index; - const separatorIndex = tagExp.search(/\s/); - let tagName = tagExp; - let attrExpPresent = true; - if(separatorIndex !== -1){//separate tag name and attributes expression - tagName = tagExp.substr(0, separatorIndex).replace(/\s\s*$/, ''); - tagExp = tagExp.substr(separatorIndex + 1); - } + stack || (stack = new Stack); + return equalFunc(objUnwrapped, othUnwrapped, bitmask, customizer, stack); + } + } + if (!isSameTag) { + return false; + } + stack || (stack = new Stack); + return equalObjects(object, other, bitmask, customizer, equalFunc, stack); + } - if(removeNSPrefix){ - const colonIndex = tagName.indexOf(":"); - if(colonIndex !== -1){ - tagName = tagName.substr(colonIndex+1); - attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + /** + * The base implementation of `_.isMap` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. 
+ */ + function baseIsMap(value) { + return isObjectLike(value) && getTag(value) == mapTag; } - } - return { - tagName: tagName, - tagExp: tagExp, - closeIndex: closeIndex, - attrExpPresent: attrExpPresent, - } -} -/** - * find paired tag for a stop node - * @param {string} xmlData - * @param {string} tagName - * @param {number} i - */ -function readStopNodeData(xmlData, tagName, i){ - const startIndex = i; - // Starting at 1 since we already have an open tag - let openTagCount = 1; + /** + * The base implementation of `_.isMatch` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @param {Array} matchData The property names, values, and compare flags to match. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + */ + function baseIsMatch(object, source, matchData, customizer) { + var index = matchData.length, + length = index, + noCustomizer = !customizer; - for (; i < xmlData.length; i++) { - if( xmlData[i] === "<"){ - if (xmlData[i+1] === "/") {//close tag - const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); - let closeTagName = xmlData.substring(i+2,closeIndex).trim(); - if(closeTagName === tagName){ - openTagCount--; - if (openTagCount === 0) { - return { - tagContent: xmlData.substring(startIndex, i), - i : closeIndex - } - } + if (object == null) { + return !length; + } + object = Object(object); + while (index--) { + var data = matchData[index]; + if ((noCustomizer && data[2]) + ? data[1] !== object[data[0]] + : !(data[0] in object) + ) { + return false; + } + } + while (++index < length) { + data = matchData[index]; + var key = data[0], + objValue = object[key], + srcValue = data[1]; + + if (noCustomizer && data[2]) { + if (objValue === undefined && !(key in object)) { + return false; } - i=closeIndex; - } else if(xmlData[i+1] === '?') { - const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") - i=closeIndex; - } else if(xmlData.substr(i + 1, 3) === '!--') { - const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") - i=closeIndex; - } else if(xmlData.substr(i + 1, 2) === '![') { - const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; - i=closeIndex; } else { - const tagData = readTagExp(xmlData, i, '>') - - if (tagData) { - const openTagName = tagData && tagData.tagName; - if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { - openTagCount++; - } - i=tagData.closeIndex; + var stack = new Stack; + if (customizer) { + var result = customizer(objValue, srcValue, key, object, source, stack); + } + if (!(result === undefined + ? baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG, customizer, stack) + : result + )) { + return false; } } } - }//end for loop -} - -function parseValue(val, shouldParse, options) { - if (shouldParse && typeof val === 'string') { - //console.log(options) - const newval = val.trim(); - if(newval === 'true' ) return true; - else if(newval === 'false' ) return false; - else return toNumber(val, options); - } else { - if (util.isExist(val)) { - return val; - } else { - return ''; + return true; } - } -} - - -module.exports = OrderedObjParser; + /** + * The base implementation of `_.isNative` without bad shim checks. 
+ * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. + */ + function baseIsNative(value) { + if (!isObject(value) || isMasked(value)) { + return false; + } + var pattern = isFunction(value) ? reIsNative : reIsHostCtor; + return pattern.test(toSource(value)); + } -/***/ }), + /** + * The base implementation of `_.isRegExp` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. + */ + function baseIsRegExp(value) { + return isObjectLike(value) && baseGetTag(value) == regexpTag; + } -/***/ 42380: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * The base implementation of `_.isSet` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. + */ + function baseIsSet(value) { + return isObjectLike(value) && getTag(value) == setTag; + } -const { buildOptions} = __nccwpck_require__(86993); -const OrderedObjParser = __nccwpck_require__(25832); -const { prettify} = __nccwpck_require__(42882); -const validator = __nccwpck_require__(61739); + /** + * The base implementation of `_.isTypedArray` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + */ + function baseIsTypedArray(value) { + return isObjectLike(value) && + isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; + } -class XMLParser{ - - constructor(options){ - this.externalEntities = {}; - this.options = buildOptions(options); - + /** + * The base implementation of `_.iteratee`. + * + * @private + * @param {*} [value=_.identity] The value to convert to an iteratee. + * @returns {Function} Returns the iteratee. + */ + function baseIteratee(value) { + // Don't store the `typeof` result in a variable to avoid a JIT bug in Safari 9. + // See https://bugs.webkit.org/show_bug.cgi?id=156034 for more details. + if (typeof value == 'function') { + return value; + } + if (value == null) { + return identity; + } + if (typeof value == 'object') { + return isArray(value) + ? baseMatchesProperty(value[0], value[1]) + : baseMatches(value); + } + return property(value); } + /** - * Parse XML dats to JS object - * @param {string|Buffer} xmlData - * @param {boolean|Object} validationOption + * The base implementation of `_.keys` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. 
*/ - parse(xmlData,validationOption){ - if(typeof xmlData === "string"){ - }else if( xmlData.toString){ - xmlData = xmlData.toString(); - }else{ - throw new Error("XML data is accepted in String or Bytes[] form.") + function baseKeys(object) { + if (!isPrototype(object)) { + return nativeKeys(object); + } + var result = []; + for (var key in Object(object)) { + if (hasOwnProperty.call(object, key) && key != 'constructor') { + result.push(key); } - if( validationOption){ - if(validationOption === true) validationOption = {}; //validate with default options - - const result = validator.validate(xmlData, validationOption); - if (result !== true) { - throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) - } - } - const orderedObjParser = new OrderedObjParser(this.options); - orderedObjParser.addExternalEntities(this.externalEntities); - const orderedResult = orderedObjParser.parseXml(xmlData); - if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; - else return prettify(orderedResult, this.options); + } + return result; } /** - * Add Entity which is not by default supported by this library - * @param {string} key - * @param {string} value + * The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. */ - addEntity(key, value){ - if(value.indexOf("&") !== -1){ - throw new Error("Entity value can't have '&'") - }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ - throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") - }else if(value === "&"){ - throw new Error("An entity with value '&' is not permitted"); - }else{ - this.externalEntities[key] = value; + function baseKeysIn(object) { + if (!isObject(object)) { + return nativeKeysIn(object); + } + var isProto = isPrototype(object), + result = []; + + for (var key in object) { + if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) { + result.push(key); } + } + return result; } -} -module.exports = XMLParser; + /** + * The base implementation of `_.lt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. + */ + function baseLt(value, other) { + return value < other; + } -/***/ }), + /** + * The base implementation of `_.map` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ + function baseMap(collection, iteratee) { + var index = -1, + result = isArrayLike(collection) ? Array(collection.length) : []; -/***/ 42882: -/***/ ((__unused_webpack_module, exports) => { + baseEach(collection, function(value, key, collection) { + result[++index] = iteratee(value, key, collection); + }); + return result; + } -"use strict"; + /** + * The base implementation of `_.matches` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property values to match. + * @returns {Function} Returns the new spec function. 
+ */ + function baseMatches(source) { + var matchData = getMatchData(source); + if (matchData.length == 1 && matchData[0][2]) { + return matchesStrictComparable(matchData[0][0], matchData[0][1]); + } + return function(object) { + return object === source || baseIsMatch(object, source, matchData); + }; + } + /** + * The base implementation of `_.matchesProperty` which doesn't clone `srcValue`. + * + * @private + * @param {string} path The path of the property to get. + * @param {*} srcValue The value to match. + * @returns {Function} Returns the new spec function. + */ + function baseMatchesProperty(path, srcValue) { + if (isKey(path) && isStrictComparable(srcValue)) { + return matchesStrictComparable(toKey(path), srcValue); + } + return function(object) { + var objValue = get(object, path); + return (objValue === undefined && objValue === srcValue) + ? hasIn(object, path) + : baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG); + }; + } -/** - * - * @param {array} node - * @param {any} options - * @returns - */ -function prettify(node, options){ - return compress( node, options); -} + /** + * The base implementation of `_.merge` without support for multiple sources. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {number} srcIndex The index of `source`. + * @param {Function} [customizer] The function to customize merged values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + */ + function baseMerge(object, source, srcIndex, customizer, stack) { + if (object === source) { + return; + } + baseFor(source, function(srcValue, key) { + stack || (stack = new Stack); + if (isObject(srcValue)) { + baseMergeDeep(object, source, key, srcIndex, baseMerge, customizer, stack); + } + else { + var newValue = customizer + ? customizer(safeGet(object, key), srcValue, (key + ''), object, source, stack) + : undefined; -/** - * - * @param {array} arr - * @param {object} options - * @param {string} jPath - * @returns object - */ -function compress(arr, options, jPath){ - let text; - const compressedObj = {}; - for (let i = 0; i < arr.length; i++) { - const tagObj = arr[i]; - const property = propName(tagObj); - let newJpath = ""; - if(jPath === undefined) newJpath = property; - else newJpath = jPath + "." + property; + if (newValue === undefined) { + newValue = srcValue; + } + assignMergeValue(object, key, newValue); + } + }, keysIn); + } - if(property === options.textNodeName){ - if(text === undefined) text = tagObj[property]; - else text += "" + tagObj[property]; - }else if(property === undefined){ - continue; - }else if(tagObj[property]){ - - let val = compress(tagObj[property], options, newJpath); - const isLeaf = isLeafTag(val, options); + /** + * A specialized version of `baseMerge` for arrays and objects which performs + * deep merges and tracks traversed objects enabling objects with circular + * references to be merged. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {string} key The key of the value to merge. + * @param {number} srcIndex The index of `source`. + * @param {Function} mergeFunc The function to merge values. + * @param {Function} [customizer] The function to customize assigned values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. 
+ */ + function baseMergeDeep(object, source, key, srcIndex, mergeFunc, customizer, stack) { + var objValue = safeGet(object, key), + srcValue = safeGet(source, key), + stacked = stack.get(srcValue); - if(tagObj[":@"]){ - assignAttributes( val, tagObj[":@"], newJpath, options); - }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ - val = val[options.textNodeName]; - }else if(Object.keys(val).length === 0){ - if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; - else val = ""; + if (stacked) { + assignMergeValue(object, key, stacked); + return; } + var newValue = customizer + ? customizer(objValue, srcValue, (key + ''), object, source, stack) + : undefined; - if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { - if(!Array.isArray(compressedObj[property])) { - compressedObj[property] = [ compressedObj[property] ]; + var isCommon = newValue === undefined; + + if (isCommon) { + var isArr = isArray(srcValue), + isBuff = !isArr && isBuffer(srcValue), + isTyped = !isArr && !isBuff && isTypedArray(srcValue); + + newValue = srcValue; + if (isArr || isBuff || isTyped) { + if (isArray(objValue)) { + newValue = objValue; + } + else if (isArrayLikeObject(objValue)) { + newValue = copyArray(objValue); + } + else if (isBuff) { + isCommon = false; + newValue = cloneBuffer(srcValue, true); + } + else if (isTyped) { + isCommon = false; + newValue = cloneTypedArray(srcValue, true); + } + else { + newValue = []; + } } - compressedObj[property].push(val); - }else{ - //TODO: if a node is not an array, then check if it should be an array - //also determine if it is a leaf node - if (options.isArray(property, newJpath, isLeaf )) { - compressedObj[property] = [val]; - }else{ - compressedObj[property] = val; + else if (isPlainObject(srcValue) || isArguments(srcValue)) { + newValue = objValue; + if (isArguments(objValue)) { + newValue = toPlainObject(objValue); + } + else if (!isObject(objValue) || isFunction(objValue)) { + newValue = initCloneObject(srcValue); + } + } + else { + isCommon = false; } } + if (isCommon) { + // Recursively merge objects and arrays (susceptible to call stack limits). + stack.set(srcValue, newValue); + mergeFunc(newValue, srcValue, srcIndex, customizer, stack); + stack['delete'](srcValue); + } + assignMergeValue(object, key, newValue); } - - } - // if(text && text.length > 0) compressedObj[options.textNodeName] = text; - if(typeof text === "string"){ - if(text.length > 0) compressedObj[options.textNodeName] = text; - }else if(text !== undefined) compressedObj[options.textNodeName] = text; - return compressedObj; -} -function propName(obj){ - const keys = Object.keys(obj); - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - if(key !== ":@") return key; - } -} + /** + * The base implementation of `_.nth` which doesn't coerce arguments. + * + * @private + * @param {Array} array The array to query. + * @param {number} n The index of the element to return. + * @returns {*} Returns the nth element of `array`. + */ + function baseNth(array, n) { + var length = array.length; + if (!length) { + return; + } + n += n < 0 ? length : 0; + return isIndex(n, length) ? array[n] : undefined; + } -function assignAttributes(obj, attrMap, jpath, options){ - if (attrMap) { - const keys = Object.keys(attrMap); - const len = keys.length; //don't make it inline - for (let i = 0; i < len; i++) { - const atrrName = keys[i]; - if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { - obj[atrrName] = [ attrMap[atrrName] ]; + /** + * The base implementation of `_.orderBy` without param guards. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function[]|Object[]|string[]} iteratees The iteratees to sort by. + * @param {string[]} orders The sort orders of `iteratees`. + * @returns {Array} Returns the new sorted array. + */ + function baseOrderBy(collection, iteratees, orders) { + if (iteratees.length) { + iteratees = arrayMap(iteratees, function(iteratee) { + if (isArray(iteratee)) { + return function(value) { + return baseGet(value, iteratee.length === 1 ? iteratee[0] : iteratee); + } + } + return iteratee; + }); } else { - obj[atrrName] = attrMap[atrrName]; + iteratees = [identity]; } + + var index = -1; + iteratees = arrayMap(iteratees, baseUnary(getIteratee())); + + var result = baseMap(collection, function(value, key, collection) { + var criteria = arrayMap(iteratees, function(iteratee) { + return iteratee(value); + }); + return { 'criteria': criteria, 'index': ++index, 'value': value }; + }); + + return baseSortBy(result, function(object, other) { + return compareMultiple(object, other, orders); + }); } - } -} -function isLeafTag(obj, options){ - const { textNodeName } = options; - const propCount = Object.keys(obj).length; - - if (propCount === 0) { - return true; - } + /** + * The base implementation of `_.pick` without support for individual + * property identifiers. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @returns {Object} Returns the new object. + */ + function basePick(object, paths) { + return basePickBy(object, paths, function(value, path) { + return hasIn(object, path); + }); + } - if ( - propCount === 1 && - (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) - ) { - return true; - } + /** + * The base implementation of `_.pickBy` without support for iteratee shorthands. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @param {Function} predicate The function invoked per property. + * @returns {Object} Returns the new object. + */ + function basePickBy(object, paths, predicate) { + var index = -1, + length = paths.length, + result = {}; - return false; -} -exports.prettify = prettify; + while (++index < length) { + var path = paths[index], + value = baseGet(object, path); + if (predicate(value, path)) { + baseSet(result, castPath(path, object), value); + } + } + return result; + } -/***/ }), + /** + * A specialized version of `baseProperty` which supports deep paths. + * + * @private + * @param {Array|string} path The path of the property to get. + * @returns {Function} Returns the new accessor function. + */ + function basePropertyDeep(path) { + return function(object) { + return baseGet(object, path); + }; + } -/***/ 7462: -/***/ ((module) => { + /** + * The base implementation of `_.pullAllBy` without support for iteratee + * shorthands. + * + * @private + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns `array`. + */ + function basePullAll(array, values, iteratee, comparator) { + var indexOf = comparator ? 
baseIndexOfWith : baseIndexOf, + index = -1, + length = values.length, + seen = array; -"use strict"; + if (array === values) { + values = copyArray(values); + } + if (iteratee) { + seen = arrayMap(array, baseUnary(iteratee)); + } + while (++index < length) { + var fromIndex = 0, + value = values[index], + computed = iteratee ? iteratee(value) : value; + + while ((fromIndex = indexOf(seen, computed, fromIndex, comparator)) > -1) { + if (seen !== array) { + splice.call(seen, fromIndex, 1); + } + splice.call(array, fromIndex, 1); + } + } + return array; + } + /** + * The base implementation of `_.pullAt` without support for individual + * indexes or capturing the removed elements. + * + * @private + * @param {Array} array The array to modify. + * @param {number[]} indexes The indexes of elements to remove. + * @returns {Array} Returns `array`. + */ + function basePullAt(array, indexes) { + var length = array ? indexes.length : 0, + lastIndex = length - 1; -class XmlNode{ - constructor(tagname) { - this.tagname = tagname; - this.child = []; //nested tags, text, cdata, comments in order - this[":@"] = {}; //attributes map - } - add(key,val){ - // this.child.push( {name : key, val: val, isCdata: isCdata }); - if(key === "__proto__") key = "#__proto__"; - this.child.push( {[key]: val }); - } - addChild(node) { - if(node.tagname === "__proto__") node.tagname = "#__proto__"; - if(node[":@"] && Object.keys(node[":@"]).length > 0){ - this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); - }else{ - this.child.push( { [node.tagname]: node.child }); + while (length--) { + var index = indexes[length]; + if (length == lastIndex || index !== previous) { + var previous = index; + if (isIndex(index)) { + splice.call(array, index, 1); + } else { + baseUnset(array, index); + } + } + } + return array; } - }; -}; - -module.exports = XmlNode; + /** + * The base implementation of `_.random` without support for returning + * floating-point numbers. + * + * @private + * @param {number} lower The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the random number. + */ + function baseRandom(lower, upper) { + return lower + nativeFloor(nativeRandom() * (upper - lower + 1)); + } -/***/ }), + /** + * The base implementation of `_.range` and `_.rangeRight` which doesn't + * coerce arguments. + * + * @private + * @param {number} start The start of the range. + * @param {number} end The end of the range. + * @param {number} step The value to increment or decrement by. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the range of numbers. + */ + function baseRange(start, end, step, fromRight) { + var index = -1, + length = nativeMax(nativeCeil((end - start) / (step || 1)), 0), + result = Array(length); -/***/ 64334: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + while (length--) { + result[fromRight ? 
length : ++index] = start; + start += step; + } + return result; + } -var CombinedStream = __nccwpck_require__(85443); -var util = __nccwpck_require__(73837); -var path = __nccwpck_require__(71017); -var http = __nccwpck_require__(13685); -var https = __nccwpck_require__(95687); -var parseUrl = (__nccwpck_require__(57310).parse); -var fs = __nccwpck_require__(57147); -var Stream = (__nccwpck_require__(12781).Stream); -var mime = __nccwpck_require__(43583); -var asynckit = __nccwpck_require__(14812); -var populate = __nccwpck_require__(95155); + /** + * The base implementation of `_.repeat` which doesn't coerce arguments. + * + * @private + * @param {string} string The string to repeat. + * @param {number} n The number of times to repeat the string. + * @returns {string} Returns the repeated string. + */ + function baseRepeat(string, n) { + var result = ''; + if (!string || n < 1 || n > MAX_SAFE_INTEGER) { + return result; + } + // Leverage the exponentiation by squaring algorithm for a faster repeat. + // See https://en.wikipedia.org/wiki/Exponentiation_by_squaring for more details. + do { + if (n % 2) { + result += string; + } + n = nativeFloor(n / 2); + if (n) { + string += string; + } + } while (n); -// Public API -module.exports = FormData; + return result; + } -// make it a Stream -util.inherits(FormData, CombinedStream); + /** + * The base implementation of `_.rest` which doesn't validate or coerce arguments. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + */ + function baseRest(func, start) { + return setToString(overRest(func, start, identity), func + ''); + } -/** - * Create readable "multipart/form-data" streams. - * Can be used to submit forms - * and file uploads to other web applications. - * - * @constructor - * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream - */ -function FormData(options) { - if (!(this instanceof FormData)) { - return new FormData(options); - } + /** + * The base implementation of `_.sample`. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @returns {*} Returns the random element. + */ + function baseSample(collection) { + return arraySample(values(collection)); + } - this._overheadLength = 0; - this._valueLength = 0; - this._valuesToMeasure = []; + /** + * The base implementation of `_.sampleSize` without param guards. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @param {number} n The number of elements to sample. + * @returns {Array} Returns the random elements. + */ + function baseSampleSize(collection, n) { + var array = values(collection); + return shuffleSelf(array, baseClamp(n, 0, array.length)); + } - CombinedStream.call(this); + /** + * The base implementation of `_.set`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. 
+ */ + function baseSet(object, path, value, customizer) { + if (!isObject(object)) { + return object; + } + path = castPath(path, object); - options = options || {}; - for (var option in options) { - this[option] = options[option]; - } -} + var index = -1, + length = path.length, + lastIndex = length - 1, + nested = object; -FormData.LINE_BREAK = '\r\n'; -FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + while (nested != null && ++index < length) { + var key = toKey(path[index]), + newValue = value; -FormData.prototype.append = function(field, value, options) { + if (key === '__proto__' || key === 'constructor' || key === 'prototype') { + return object; + } - options = options || {}; + if (index != lastIndex) { + var objValue = nested[key]; + newValue = customizer ? customizer(objValue, key, nested) : undefined; + if (newValue === undefined) { + newValue = isObject(objValue) + ? objValue + : (isIndex(path[index + 1]) ? [] : {}); + } + } + assignValue(nested, key, newValue); + nested = nested[key]; + } + return object; + } - // allow filename as single option - if (typeof options == 'string') { - options = {filename: options}; - } + /** + * The base implementation of `setData` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ + var baseSetData = !metaMap ? identity : function(func, data) { + metaMap.set(func, data); + return func; + }; - var append = CombinedStream.prototype.append.bind(this); + /** + * The base implementation of `setToString` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. + */ + var baseSetToString = !defineProperty ? identity : function(func, string) { + return defineProperty(func, 'toString', { + 'configurable': true, + 'enumerable': false, + 'value': constant(string), + 'writable': true + }); + }; - // all that streamy business can't handle numbers - if (typeof value == 'number') { - value = '' + value; - } + /** + * The base implementation of `_.shuffle`. + * + * @private + * @param {Array|Object} collection The collection to shuffle. + * @returns {Array} Returns the new shuffled array. + */ + function baseShuffle(collection) { + return shuffleSelf(values(collection)); + } - // https://github.com/felixge/node-form-data/issues/38 - if (util.isArray(value)) { - // Please convert your array into string - // the way web server expects it - this._error(new Error('Arrays are not supported.')); - return; - } + /** + * The base implementation of `_.slice` without an iteratee call guard. + * + * @private + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function baseSlice(array, start, end) { + var index = -1, + length = array.length; - var header = this._multiPartHeader(field, value, options); - var footer = this._multiPartFooter(); + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = end > length ? length : end; + if (end < 0) { + end += length; + } + length = start > end ? 
0 : ((end - start) >>> 0); + start >>>= 0; - append(header); - append(value); - append(footer); + var result = Array(length); + while (++index < length) { + result[index] = array[index + start]; + } + return result; + } - // pass along options.knownLength - this._trackLength(header, value, options); -}; + /** + * The base implementation of `_.some` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + */ + function baseSome(collection, predicate) { + var result; -FormData.prototype._trackLength = function(header, value, options) { - var valueLength = 0; + baseEach(collection, function(value, index, collection) { + result = predicate(value, index, collection); + return !result; + }); + return !!result; + } - // used w/ getLengthSync(), when length is known. - // e.g. for streaming directly from a remote server, - // w/ a known file a size, and not wanting to wait for - // incoming file to finish to get its size. - if (options.knownLength != null) { - valueLength += +options.knownLength; - } else if (Buffer.isBuffer(value)) { - valueLength = value.length; - } else if (typeof value === 'string') { - valueLength = Buffer.byteLength(value); - } + /** + * The base implementation of `_.sortedIndex` and `_.sortedLastIndex` which + * performs a binary search of `array` to determine the index at which `value` + * should be inserted into `array` in order to maintain its sort order. + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + */ + function baseSortedIndex(array, value, retHighest) { + var low = 0, + high = array == null ? low : array.length; - this._valueLength += valueLength; + if (typeof value == 'number' && value === value && high <= HALF_MAX_ARRAY_LENGTH) { + while (low < high) { + var mid = (low + high) >>> 1, + computed = array[mid]; - // @check why add CRLF? does this account for custom/multiple CRLFs? - this._overheadLength += - Buffer.byteLength(header) + - FormData.LINE_BREAK.length; + if (computed !== null && !isSymbol(computed) && + (retHighest ? (computed <= value) : (computed < value))) { + low = mid + 1; + } else { + high = mid; + } + } + return high; + } + return baseSortedIndexBy(array, value, identity, retHighest); + } - // empty or either doesn't have path or not an http response or not a stream - if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { - return; - } + /** + * The base implementation of `_.sortedIndexBy` and `_.sortedLastIndexBy` + * which invokes `iteratee` for `value` and each element of `array` to compute + * their sort ranking. The iteratee is invoked with one argument; (value). + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} iteratee The iteratee invoked per element. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. 
+ */ + function baseSortedIndexBy(array, value, iteratee, retHighest) { + var low = 0, + high = array == null ? 0 : array.length; + if (high === 0) { + return 0; + } - // no need to bother with the length - if (!options.knownLength) { - this._valuesToMeasure.push(value); - } -}; + value = iteratee(value); + var valIsNaN = value !== value, + valIsNull = value === null, + valIsSymbol = isSymbol(value), + valIsUndefined = value === undefined; -FormData.prototype._lengthRetriever = function(value, callback) { + while (low < high) { + var mid = nativeFloor((low + high) / 2), + computed = iteratee(array[mid]), + othIsDefined = computed !== undefined, + othIsNull = computed === null, + othIsReflexive = computed === computed, + othIsSymbol = isSymbol(computed); - if (value.hasOwnProperty('fd')) { + if (valIsNaN) { + var setLow = retHighest || othIsReflexive; + } else if (valIsUndefined) { + setLow = othIsReflexive && (retHighest || othIsDefined); + } else if (valIsNull) { + setLow = othIsReflexive && othIsDefined && (retHighest || !othIsNull); + } else if (valIsSymbol) { + setLow = othIsReflexive && othIsDefined && !othIsNull && (retHighest || !othIsSymbol); + } else if (othIsNull || othIsSymbol) { + setLow = false; + } else { + setLow = retHighest ? (computed <= value) : (computed < value); + } + if (setLow) { + low = mid + 1; + } else { + high = mid; + } + } + return nativeMin(high, MAX_ARRAY_INDEX); + } - // take read range into a account - // `end` = Infinity –> read file till the end - // - // TODO: Looks like there is bug in Node fs.createReadStream - // it doesn't respect `end` options without `start` options - // Fix it when node fixes it. - // https://github.com/joyent/node/issues/7819 - if (value.end != undefined && value.end != Infinity && value.start != undefined) { - - // when end specified - // no need to calculate range - // inclusive, starts with 0 - callback(null, value.end + 1 - (value.start ? value.start : 0)); - - // not that fast snoopy - } else { - // still need to fetch file size from fs - fs.stat(value.path, function(err, stat) { + /** + * The base implementation of `_.sortedUniq` and `_.sortedUniqBy` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + */ + function baseSortedUniq(array, iteratee) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; - var fileSize; + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; - if (err) { - callback(err); - return; + if (!index || !eq(computed, seen)) { + var seen = computed; + result[resIndex++] = value === 0 ? 0 : value; } + } + return result; + } - // update final size based on the range options - fileSize = stat.size - (value.start ? value.start : 0); - callback(null, fileSize); - }); + /** + * The base implementation of `_.toNumber` which doesn't ensure correct + * conversions of binary, hexadecimal, or octal string values. + * + * @private + * @param {*} value The value to process. + * @returns {number} Returns the number. 
+ */ + function baseToNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + return +value; } - // or http response - } else if (value.hasOwnProperty('httpVersion')) { - callback(null, +value.headers['content-length']); + /** + * The base implementation of `_.toString` which doesn't convert nullish + * values to empty strings. + * + * @private + * @param {*} value The value to process. + * @returns {string} Returns the string. + */ + function baseToString(value) { + // Exit early for strings to avoid a performance hit in some environments. + if (typeof value == 'string') { + return value; + } + if (isArray(value)) { + // Recursively convert values (susceptible to call stack limits). + return arrayMap(value, baseToString) + ''; + } + if (isSymbol(value)) { + return symbolToString ? symbolToString.call(value) : ''; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; + } - // or request stream http://github.com/mikeal/request - } else if (value.hasOwnProperty('httpModule')) { - // wait till response come back - value.on('response', function(response) { - value.pause(); - callback(null, +response.headers['content-length']); - }); - value.resume(); + /** + * The base implementation of `_.uniqBy` without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. + */ + function baseUniq(array, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + length = array.length, + isCommon = true, + result = [], + seen = result; - // something else - } else { - callback('Unknown stream'); - } -}; + if (comparator) { + isCommon = false; + includes = arrayIncludesWith; + } + else if (length >= LARGE_ARRAY_SIZE) { + var set = iteratee ? null : createSet(array); + if (set) { + return setToArray(set); + } + isCommon = false; + includes = cacheHas; + seen = new SetCache; + } + else { + seen = iteratee ? [] : result; + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; -FormData.prototype._multiPartHeader = function(field, value, options) { - // custom header specified (as string)? - // it becomes responsible for boundary - // (e.g. to handle extra CRLFs on .NET servers) - if (typeof options.header == 'string') { - return options.header; - } + value = (comparator || value !== 0) ? value : 0; + if (isCommon && computed === computed) { + var seenIndex = seen.length; + while (seenIndex--) { + if (seen[seenIndex] === computed) { + continue outer; + } + } + if (iteratee) { + seen.push(computed); + } + result.push(value); + } + else if (!includes(seen, computed, comparator)) { + if (seen !== result) { + seen.push(computed); + } + result.push(value); + } + } + return result; + } - var contentDisposition = this._getContentDisposition(value, options); - var contentType = this._getContentType(value, options); + /** + * The base implementation of `_.unset`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The property path to unset. + * @returns {boolean} Returns `true` if the property is deleted, else `false`. 
+ */ + function baseUnset(object, path) { + path = castPath(path, object); + object = parent(object, path); + return object == null || delete object[toKey(last(path))]; + } - var contents = ''; - var headers = { - // add custom disposition as third element or keep it two elements if not - 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), - // if no content type. allow it to be empty array - 'Content-Type': [].concat(contentType || []) - }; + /** + * The base implementation of `_.update`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to update. + * @param {Function} updater The function to produce the updated value. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. + */ + function baseUpdate(object, path, updater, customizer) { + return baseSet(object, path, updater(baseGet(object, path)), customizer); + } - // allow custom headers. - if (typeof options.header == 'object') { - populate(headers, options.header); - } + /** + * The base implementation of methods like `_.dropWhile` and `_.takeWhile` + * without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to query. + * @param {Function} predicate The function invoked per iteration. + * @param {boolean} [isDrop] Specify dropping elements instead of taking them. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the slice of `array`. + */ + function baseWhile(array, predicate, isDrop, fromRight) { + var length = array.length, + index = fromRight ? length : -1; - var header; - for (var prop in headers) { - if (!headers.hasOwnProperty(prop)) continue; - header = headers[prop]; + while ((fromRight ? index-- : ++index < length) && + predicate(array[index], index, array)) {} - // skip nullish headers. - if (header == null) { - continue; + return isDrop + ? baseSlice(array, (fromRight ? 0 : index), (fromRight ? index + 1 : length)) + : baseSlice(array, (fromRight ? index + 1 : 0), (fromRight ? length : index)); } - // convert all headers to arrays. - if (!Array.isArray(header)) { - header = [header]; + /** + * The base implementation of `wrapperValue` which returns the result of + * performing a sequence of actions on the unwrapped `value`, where each + * successive action is supplied the return value of the previous. + * + * @private + * @param {*} value The unwrapped value. + * @param {Array} actions Actions to perform to resolve the unwrapped value. + * @returns {*} Returns the resolved value. + */ + function baseWrapperValue(value, actions) { + var result = value; + if (result instanceof LazyWrapper) { + result = result.value(); + } + return arrayReduce(actions, function(result, action) { + return action.func.apply(action.thisArg, arrayPush([result], action.args)); + }, result); } - // add non-empty headers. - if (header.length) { - contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; - } - } + /** + * The base implementation of methods like `_.xor`, without support for + * iteratee shorthands, that accepts an array of arrays to inspect. + * + * @private + * @param {Array} arrays The arrays to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of values. 
+ */ + function baseXor(arrays, iteratee, comparator) { + var length = arrays.length; + if (length < 2) { + return length ? baseUniq(arrays[0]) : []; + } + var index = -1, + result = Array(length); - return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; -}; + while (++index < length) { + var array = arrays[index], + othIndex = -1; -FormData.prototype._getContentDisposition = function(value, options) { + while (++othIndex < length) { + if (othIndex != index) { + result[index] = baseDifference(result[index] || array, arrays[othIndex], iteratee, comparator); + } + } + } + return baseUniq(baseFlatten(result, 1), iteratee, comparator); + } - var filename - , contentDisposition - ; + /** + * This base implementation of `_.zipObject` which assigns values using `assignFunc`. + * + * @private + * @param {Array} props The property identifiers. + * @param {Array} values The property values. + * @param {Function} assignFunc The function to assign values. + * @returns {Object} Returns the new object. + */ + function baseZipObject(props, values, assignFunc) { + var index = -1, + length = props.length, + valsLength = values.length, + result = {}; - if (typeof options.filepath === 'string') { - // custom filepath for relative paths - filename = path.normalize(options.filepath).replace(/\\/g, '/'); - } else if (options.filename || value.name || value.path) { - // custom filename take precedence - // formidable and the browser add a name property - // fs- and request- streams have path property - filename = path.basename(options.filename || value.name || value.path); - } else if (value.readable && value.hasOwnProperty('httpVersion')) { - // or try http response - filename = path.basename(value.client._httpMessage.path || ''); - } + while (++index < length) { + var value = index < valsLength ? values[index] : undefined; + assignFunc(result, props[index], value); + } + return result; + } - if (filename) { - contentDisposition = 'filename="' + filename + '"'; - } + /** + * Casts `value` to an empty array if it's not an array like object. + * + * @private + * @param {*} value The value to inspect. + * @returns {Array|Object} Returns the cast array-like object. + */ + function castArrayLikeObject(value) { + return isArrayLikeObject(value) ? value : []; + } - return contentDisposition; -}; + /** + * Casts `value` to `identity` if it's not a function. + * + * @private + * @param {*} value The value to inspect. + * @returns {Function} Returns cast function. + */ + function castFunction(value) { + return typeof value == 'function' ? value : identity; + } -FormData.prototype._getContentType = function(value, options) { + /** + * Casts `value` to a path array if it's not one. + * + * @private + * @param {*} value The value to inspect. + * @param {Object} [object] The object to query keys on. + * @returns {Array} Returns the cast property path array. + */ + function castPath(value, object) { + if (isArray(value)) { + return value; + } + return isKey(value, object) ? [value] : stringToPath(toString(value)); + } - // use custom content-type above all - var contentType = options.contentType; + /** + * A `baseRest` alias which can be replaced with `identity` by module + * replacement plugins. + * + * @private + * @type {Function} + * @param {Function} func The function to apply a rest parameter to. + * @returns {Function} Returns the new function. 
+ */ + var castRest = baseRest; - // or try `name` from formidable, browser - if (!contentType && value.name) { - contentType = mime.lookup(value.name); - } + /** + * Casts `array` to a slice if it's needed. + * + * @private + * @param {Array} array The array to inspect. + * @param {number} start The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the cast slice. + */ + function castSlice(array, start, end) { + var length = array.length; + end = end === undefined ? length : end; + return (!start && end >= length) ? array : baseSlice(array, start, end); + } - // or try `path` from fs-, request- streams - if (!contentType && value.path) { - contentType = mime.lookup(value.path); - } + /** + * A simple wrapper around the global [`clearTimeout`](https://mdn.io/clearTimeout). + * + * @private + * @param {number|Object} id The timer id or timeout object of the timer to clear. + */ + var clearTimeout = ctxClearTimeout || function(id) { + return root.clearTimeout(id); + }; - // or if it's http-reponse - if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { - contentType = value.headers['content-type']; - } + /** + * Creates a clone of `buffer`. + * + * @private + * @param {Buffer} buffer The buffer to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Buffer} Returns the cloned buffer. + */ + function cloneBuffer(buffer, isDeep) { + if (isDeep) { + return buffer.slice(); + } + var length = buffer.length, + result = allocUnsafe ? allocUnsafe(length) : new buffer.constructor(length); - // or guess it from the filepath or filename - if (!contentType && (options.filepath || options.filename)) { - contentType = mime.lookup(options.filepath || options.filename); - } + buffer.copy(result); + return result; + } - // fallback to the default content type if `value` is not simple value - if (!contentType && typeof value == 'object') { - contentType = FormData.DEFAULT_CONTENT_TYPE; - } + /** + * Creates a clone of `arrayBuffer`. + * + * @private + * @param {ArrayBuffer} arrayBuffer The array buffer to clone. + * @returns {ArrayBuffer} Returns the cloned array buffer. + */ + function cloneArrayBuffer(arrayBuffer) { + var result = new arrayBuffer.constructor(arrayBuffer.byteLength); + new Uint8Array(result).set(new Uint8Array(arrayBuffer)); + return result; + } - return contentType; -}; + /** + * Creates a clone of `dataView`. + * + * @private + * @param {Object} dataView The data view to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned data view. + */ + function cloneDataView(dataView, isDeep) { + var buffer = isDeep ? cloneArrayBuffer(dataView.buffer) : dataView.buffer; + return new dataView.constructor(buffer, dataView.byteOffset, dataView.byteLength); + } -FormData.prototype._multiPartFooter = function() { - return function(next) { - var footer = FormData.LINE_BREAK; + /** + * Creates a clone of `regexp`. + * + * @private + * @param {Object} regexp The regexp to clone. + * @returns {Object} Returns the cloned regexp. + */ + function cloneRegExp(regexp) { + var result = new regexp.constructor(regexp.source, reFlags.exec(regexp)); + result.lastIndex = regexp.lastIndex; + return result; + } - var lastPart = (this._streams.length === 0); - if (lastPart) { - footer += this._lastBoundary(); + /** + * Creates a clone of the `symbol` object. + * + * @private + * @param {Object} symbol The symbol object to clone. + * @returns {Object} Returns the cloned symbol object. 
+ */ + function cloneSymbol(symbol) { + return symbolValueOf ? Object(symbolValueOf.call(symbol)) : {}; } - next(footer); - }.bind(this); -}; + /** + * Creates a clone of `typedArray`. + * + * @private + * @param {Object} typedArray The typed array to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned typed array. + */ + function cloneTypedArray(typedArray, isDeep) { + var buffer = isDeep ? cloneArrayBuffer(typedArray.buffer) : typedArray.buffer; + return new typedArray.constructor(buffer, typedArray.byteOffset, typedArray.length); + } -FormData.prototype._lastBoundary = function() { - return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; -}; + /** + * Compares values to sort them in ascending order. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {number} Returns the sort order indicator for `value`. + */ + function compareAscending(value, other) { + if (value !== other) { + var valIsDefined = value !== undefined, + valIsNull = value === null, + valIsReflexive = value === value, + valIsSymbol = isSymbol(value); -FormData.prototype.getHeaders = function(userHeaders) { - var header; - var formHeaders = { - 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() - }; + var othIsDefined = other !== undefined, + othIsNull = other === null, + othIsReflexive = other === other, + othIsSymbol = isSymbol(other); - for (header in userHeaders) { - if (userHeaders.hasOwnProperty(header)) { - formHeaders[header.toLowerCase()] = userHeaders[header]; + if ((!othIsNull && !othIsSymbol && !valIsSymbol && value > other) || + (valIsSymbol && othIsDefined && othIsReflexive && !othIsNull && !othIsSymbol) || + (valIsNull && othIsDefined && othIsReflexive) || + (!valIsDefined && othIsReflexive) || + !valIsReflexive) { + return 1; + } + if ((!valIsNull && !valIsSymbol && !othIsSymbol && value < other) || + (othIsSymbol && valIsDefined && valIsReflexive && !valIsNull && !valIsSymbol) || + (othIsNull && valIsDefined && valIsReflexive) || + (!othIsDefined && valIsReflexive) || + !othIsReflexive) { + return -1; + } + } + return 0; } - } - - return formHeaders; -}; -FormData.prototype.setBoundary = function(boundary) { - this._boundary = boundary; -}; + /** + * Used by `_.orderBy` to compare multiple properties of a value to another + * and stable sort them. + * + * If `orders` is unspecified, all values are sorted in ascending order. Otherwise, + * specify an order of "desc" for descending or "asc" for ascending sort order + * of corresponding values. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {boolean[]|string[]} orders The order to sort by for each property. + * @returns {number} Returns the sort order indicator for `object`. + */ + function compareMultiple(object, other, orders) { + var index = -1, + objCriteria = object.criteria, + othCriteria = other.criteria, + length = objCriteria.length, + ordersLength = orders.length; -FormData.prototype.getBoundary = function() { - if (!this._boundary) { - this._generateBoundary(); - } + while (++index < length) { + var result = compareAscending(objCriteria[index], othCriteria[index]); + if (result) { + if (index >= ordersLength) { + return result; + } + var order = orders[index]; + return result * (order == 'desc' ? 
-1 : 1); + } + } + // Fixes an `Array#sort` bug in the JS engine embedded in Adobe applications + // that causes it, under certain circumstances, to provide the same value for + // `object` and `other`. See https://github.com/jashkenas/underscore/pull/1247 + // for more details. + // + // This also ensures a stable sort in V8 and other engines. + // See https://bugs.chromium.org/p/v8/issues/detail?id=90 for more details. + return object.index - other.index; + } - return this._boundary; -}; + /** + * Creates an array that is the composition of partially applied arguments, + * placeholders, and provided arguments into a single array of arguments. + * + * @private + * @param {Array} args The provided arguments. + * @param {Array} partials The arguments to prepend to those provided. + * @param {Array} holders The `partials` placeholder indexes. + * @params {boolean} [isCurried] Specify composing for a curried function. + * @returns {Array} Returns the new array of composed arguments. + */ + function composeArgs(args, partials, holders, isCurried) { + var argsIndex = -1, + argsLength = args.length, + holdersLength = holders.length, + leftIndex = -1, + leftLength = partials.length, + rangeLength = nativeMax(argsLength - holdersLength, 0), + result = Array(leftLength + rangeLength), + isUncurried = !isCurried; -FormData.prototype.getBuffer = function() { - var dataBuffer = new Buffer.alloc( 0 ); - var boundary = this.getBoundary(); + while (++leftIndex < leftLength) { + result[leftIndex] = partials[leftIndex]; + } + while (++argsIndex < holdersLength) { + if (isUncurried || argsIndex < argsLength) { + result[holders[argsIndex]] = args[argsIndex]; + } + } + while (rangeLength--) { + result[leftIndex++] = args[argsIndex++]; + } + return result; + } - // Create the form content. Add Line breaks to the end of data. - for (var i = 0, len = this._streams.length; i < len; i++) { - if (typeof this._streams[i] !== 'function') { + /** + * This function is like `composeArgs` except that the arguments composition + * is tailored for `_.partialRight`. + * + * @private + * @param {Array} args The provided arguments. + * @param {Array} partials The arguments to append to those provided. + * @param {Array} holders The `partials` placeholder indexes. + * @params {boolean} [isCurried] Specify composing for a curried function. + * @returns {Array} Returns the new array of composed arguments. + */ + function composeArgsRight(args, partials, holders, isCurried) { + var argsIndex = -1, + argsLength = args.length, + holdersIndex = -1, + holdersLength = holders.length, + rightIndex = -1, + rightLength = partials.length, + rangeLength = nativeMax(argsLength - holdersLength, 0), + result = Array(rangeLength + rightLength), + isUncurried = !isCurried; - // Add content to the buffer. - if(Buffer.isBuffer(this._streams[i])) { - dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); - }else { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + while (++argsIndex < rangeLength) { + result[argsIndex] = args[argsIndex]; + } + var offset = argsIndex; + while (++rightIndex < rightLength) { + result[offset + rightIndex] = partials[rightIndex]; + } + while (++holdersIndex < holdersLength) { + if (isUncurried || argsIndex < argsLength) { + result[offset + holders[holdersIndex]] = args[argsIndex++]; + } } + return result; + } + + /** + * Copies the values of `source` to `array`. + * + * @private + * @param {Array} source The array to copy values from. 
+ * @param {Array} [array=[]] The array to copy values to. + * @returns {Array} Returns `array`. + */ + function copyArray(source, array) { + var index = -1, + length = source.length; - // Add break after content. - if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + array || (array = Array(length)); + while (++index < length) { + array[index] = source[index]; } + return array; } - } - // Add the footer and return the Buffer object. - return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); -}; + /** + * Copies properties of `source` to `object`. + * + * @private + * @param {Object} source The object to copy properties from. + * @param {Array} props The property identifiers to copy. + * @param {Object} [object={}] The object to copy properties to. + * @param {Function} [customizer] The function to customize copied values. + * @returns {Object} Returns `object`. + */ + function copyObject(source, props, object, customizer) { + var isNew = !object; + object || (object = {}); -FormData.prototype._generateBoundary = function() { - // This generates a 50 character boundary similar to those used by Firefox. - // They are optimized for boyer-moore parsing. - var boundary = '--------------------------'; - for (var i = 0; i < 24; i++) { - boundary += Math.floor(Math.random() * 10).toString(16); - } + var index = -1, + length = props.length; - this._boundary = boundary; -}; + while (++index < length) { + var key = props[index]; -// Note: getLengthSync DOESN'T calculate streams length -// As workaround one can calculate file size manually -// and add it as knownLength option -FormData.prototype.getLengthSync = function() { - var knownLength = this._overheadLength + this._valueLength; + var newValue = customizer + ? customizer(object[key], source[key], key, object, source) + : undefined; - // Don't get confused, there are 3 "internal" streams for each keyval pair - // so it basically checks if there is any value added to the form - if (this._streams.length) { - knownLength += this._lastBoundary().length; - } + if (newValue === undefined) { + newValue = source[key]; + } + if (isNew) { + baseAssignValue(object, key, newValue); + } else { + assignValue(object, key, newValue); + } + } + return object; + } - // https://github.com/form-data/form-data/issues/40 - if (!this.hasKnownLength()) { - // Some async length retrievers are present - // therefore synchronous length calculation is false. - // Please use getLength(callback) to get proper length - this._error(new Error('Cannot calculate proper length in synchronous way.')); - } + /** + * Copies own symbols of `source` to `object`. + * + * @private + * @param {Object} source The object to copy symbols from. + * @param {Object} [object={}] The object to copy symbols to. + * @returns {Object} Returns `object`. + */ + function copySymbols(source, object) { + return copyObject(source, getSymbols(source), object); + } - return knownLength; -}; + /** + * Copies own and inherited symbols of `source` to `object`. + * + * @private + * @param {Object} source The object to copy symbols from. + * @param {Object} [object={}] The object to copy symbols to. + * @returns {Object} Returns `object`. 
+ */ + function copySymbolsIn(source, object) { + return copyObject(source, getSymbolsIn(source), object); + } -// Public API to check if length of added values is known -// https://github.com/form-data/form-data/issues/196 -// https://github.com/form-data/form-data/issues/262 -FormData.prototype.hasKnownLength = function() { - var hasKnownLength = true; + /** + * Creates a function like `_.groupBy`. + * + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} [initializer] The accumulator object initializer. + * @returns {Function} Returns the new aggregator function. + */ + function createAggregator(setter, initializer) { + return function(collection, iteratee) { + var func = isArray(collection) ? arrayAggregator : baseAggregator, + accumulator = initializer ? initializer() : {}; - if (this._valuesToMeasure.length) { - hasKnownLength = false; - } + return func(collection, setter, getIteratee(iteratee, 2), accumulator); + }; + } - return hasKnownLength; -}; + /** + * Creates a function like `_.assign`. + * + * @private + * @param {Function} assigner The function to assign values. + * @returns {Function} Returns the new assigner function. + */ + function createAssigner(assigner) { + return baseRest(function(object, sources) { + var index = -1, + length = sources.length, + customizer = length > 1 ? sources[length - 1] : undefined, + guard = length > 2 ? sources[2] : undefined; -FormData.prototype.getLength = function(cb) { - var knownLength = this._overheadLength + this._valueLength; + customizer = (assigner.length > 3 && typeof customizer == 'function') + ? (length--, customizer) + : undefined; - if (this._streams.length) { - knownLength += this._lastBoundary().length; - } + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + customizer = length < 3 ? undefined : customizer; + length = 1; + } + object = Object(object); + while (++index < length) { + var source = sources[index]; + if (source) { + assigner(object, source, index, customizer); + } + } + return object; + }); + } - if (!this._valuesToMeasure.length) { - process.nextTick(cb.bind(this, null, knownLength)); - return; - } + /** + * Creates a `baseEach` or `baseEachRight` function. + * + * @private + * @param {Function} eachFunc The function to iterate over a collection. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ + function createBaseEach(eachFunc, fromRight) { + return function(collection, iteratee) { + if (collection == null) { + return collection; + } + if (!isArrayLike(collection)) { + return eachFunc(collection, iteratee); + } + var length = collection.length, + index = fromRight ? length : -1, + iterable = Object(collection); - asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { - if (err) { - cb(err); - return; + while ((fromRight ? index-- : ++index < length)) { + if (iteratee(iterable[index], index, iterable) === false) { + break; + } + } + return collection; + }; } - values.forEach(function(length) { - knownLength += length; - }); - - cb(null, knownLength); - }); -}; + /** + * Creates a base function for methods like `_.forIn` and `_.forOwn`. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. 
+ */ + function createBaseFor(fromRight) { + return function(object, iteratee, keysFunc) { + var index = -1, + iterable = Object(object), + props = keysFunc(object), + length = props.length; -FormData.prototype.submit = function(params, cb) { - var request - , options - , defaults = {method: 'post'} - ; + while (length--) { + var key = props[fromRight ? length : ++index]; + if (iteratee(iterable[key], key, iterable) === false) { + break; + } + } + return object; + }; + } - // parse provided url if it's string - // or treat it as options object - if (typeof params == 'string') { + /** + * Creates a function that wraps `func` to invoke it with the optional `this` + * binding of `thisArg`. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} [thisArg] The `this` binding of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createBind(func, bitmask, thisArg) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); - params = parseUrl(params); - options = populate({ - port: params.port, - path: params.pathname, - host: params.hostname, - protocol: params.protocol - }, defaults); + function wrapper() { + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return fn.apply(isBind ? thisArg : this, arguments); + } + return wrapper; + } - // use custom params - } else { + /** + * Creates a function like `_.lowerFirst`. + * + * @private + * @param {string} methodName The name of the `String` case method to use. + * @returns {Function} Returns the new case function. + */ + function createCaseFirst(methodName) { + return function(string) { + string = toString(string); - options = populate(params, defaults); - // if no port provided use default one - if (!options.port) { - options.port = options.protocol == 'https:' ? 443 : 80; - } - } + var strSymbols = hasUnicode(string) + ? stringToArray(string) + : undefined; - // put that good code in getHeaders to some use - options.headers = this.getHeaders(params.headers); + var chr = strSymbols + ? strSymbols[0] + : string.charAt(0); - // https if specified, fallback to http in any other case - if (options.protocol == 'https:') { - request = https.request(options); - } else { - request = http.request(options); - } + var trailing = strSymbols + ? castSlice(strSymbols, 1).join('') + : string.slice(1); - // get content length and fire away - this.getLength(function(err, length) { - if (err && err !== 'Unknown stream') { - this._error(err); - return; + return chr[methodName]() + trailing; + }; } - // add content length - if (length) { - request.setHeader('Content-Length', length); + /** + * Creates a function like `_.camelCase`. + * + * @private + * @param {Function} callback The function to combine each word. + * @returns {Function} Returns the new compounder function. + */ + function createCompounder(callback) { + return function(string) { + return arrayReduce(words(deburr(string).replace(reApos, '')), callback, ''); + }; } - this.pipe(request); - if (cb) { - var onResponse; - - var callback = function (error, responce) { - request.removeListener('error', callback); - request.removeListener('response', onResponse); + /** + * Creates a function that produces an instance of `Ctor` regardless of + * whether it was invoked as part of a `new` expression or by `call` or `apply`. + * + * @private + * @param {Function} Ctor The constructor to wrap. 
+ * @returns {Function} Returns the new wrapped function. + */ + function createCtor(Ctor) { + return function() { + // Use a `switch` statement to work with class constructors. See + // http://ecma-international.org/ecma-262/7.0/#sec-ecmascript-function-objects-call-thisargument-argumentslist + // for more details. + var args = arguments; + switch (args.length) { + case 0: return new Ctor; + case 1: return new Ctor(args[0]); + case 2: return new Ctor(args[0], args[1]); + case 3: return new Ctor(args[0], args[1], args[2]); + case 4: return new Ctor(args[0], args[1], args[2], args[3]); + case 5: return new Ctor(args[0], args[1], args[2], args[3], args[4]); + case 6: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5]); + case 7: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5], args[6]); + } + var thisBinding = baseCreate(Ctor.prototype), + result = Ctor.apply(thisBinding, args); - return cb.call(this, error, responce); + // Mimic the constructor's `return` behavior. + // See https://es5.github.io/#x13.2.2 for more details. + return isObject(result) ? result : thisBinding; }; - - onResponse = callback.bind(this, null); - - request.on('error', callback); - request.on('response', onResponse); } - }.bind(this)); - - return request; -}; - -FormData.prototype._error = function(err) { - if (!this.error) { - this.error = err; - this.pause(); - this.emit('error', err); - } -}; - -FormData.prototype.toString = function () { - return '[object FormData]'; -}; + /** + * Creates a function that wraps `func` to enable currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {number} arity The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createCurry(func, bitmask, arity) { + var Ctor = createCtor(func); -/***/ }), - -/***/ 95155: -/***/ ((module) => { - -// populates missing values -module.exports = function(dst, src) { - - Object.keys(src).forEach(function(prop) - { - dst[prop] = dst[prop] || src[prop]; - }); + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length, + placeholder = getHolder(wrapper); - return dst; -}; + while (index--) { + args[index] = arguments[index]; + } + var holders = (length < 3 && args[0] !== placeholder && args[length - 1] !== placeholder) + ? [] + : replaceHolders(args, placeholder); + length -= holders.length; + if (length < arity) { + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, undefined, + args, holders, undefined, undefined, arity - length); + } + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return apply(fn, this, args); + } + return wrapper; + } -/***/ }), + /** + * Creates a `_.find` or `_.findLast` function. + * + * @private + * @param {Function} findIndexFunc The function to find the collection index. + * @returns {Function} Returns the new find function. + */ + function createFind(findIndexFunc) { + return function(collection, predicate, fromIndex) { + var iterable = Object(collection); + if (!isArrayLike(collection)) { + var iteratee = getIteratee(predicate, 3); + collection = keys(collection); + predicate = function(key) { return iteratee(iterable[key], key, iterable); }; + } + var index = findIndexFunc(collection, predicate, fromIndex); + return index > -1 ? iterable[iteratee ? 
collection[index] : index] : undefined; + }; + } -/***/ 31621: -/***/ ((module) => { + /** + * Creates a `_.flow` or `_.flowRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new flow function. + */ + function createFlow(fromRight) { + return flatRest(function(funcs) { + var length = funcs.length, + index = length, + prereq = LodashWrapper.prototype.thru; -"use strict"; + if (fromRight) { + funcs.reverse(); + } + while (index--) { + var func = funcs[index]; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + if (prereq && !wrapper && getFuncName(func) == 'wrapper') { + var wrapper = new LodashWrapper([], true); + } + } + index = wrapper ? index : length; + while (++index < length) { + func = funcs[index]; + var funcName = getFuncName(func), + data = funcName == 'wrapper' ? getData(func) : undefined; -module.exports = (flag, argv = process.argv) => { - const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); - const position = argv.indexOf(prefix + flag); - const terminatorPosition = argv.indexOf('--'); - return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); -}; + if (data && isLaziable(data[0]) && + data[1] == (WRAP_ARY_FLAG | WRAP_CURRY_FLAG | WRAP_PARTIAL_FLAG | WRAP_REARG_FLAG) && + !data[4].length && data[9] == 1 + ) { + wrapper = wrapper[getFuncName(data[0])].apply(wrapper, data[3]); + } else { + wrapper = (func.length == 1 && isLaziable(func)) + ? wrapper[funcName]() + : wrapper.thru(func); + } + } + return function() { + var args = arguments, + value = args[0]; + if (wrapper && args.length == 1 && isArray(value)) { + return wrapper.plant(value).value(); + } + var index = 0, + result = length ? funcs[index].apply(this, args) : value; -/***/ }), + while (++index < length) { + result = funcs[index].call(this, result); + } + return result; + }; + }); + } -/***/ 77492: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + /** + * Creates a function that wraps `func` to invoke it with optional `this` + * binding of `thisArg`, partial application, and currying. + * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [partialsRight] The arguments to append to those provided + * to the new function. + * @param {Array} [holdersRight] The `partialsRight` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createHybrid(func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, argPos, ary, arity) { + var isAry = bitmask & WRAP_ARY_FLAG, + isBind = bitmask & WRAP_BIND_FLAG, + isBindKey = bitmask & WRAP_BIND_KEY_FLAG, + isCurried = bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG), + isFlip = bitmask & WRAP_FLIP_FLAG, + Ctor = isBindKey ? 
undefined : createCtor(func); -"use strict"; + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const net_1 = __importDefault(__nccwpck_require__(41808)); -const tls_1 = __importDefault(__nccwpck_require__(24404)); -const url_1 = __importDefault(__nccwpck_require__(57310)); -const debug_1 = __importDefault(__nccwpck_require__(38237)); -const once_1 = __importDefault(__nccwpck_require__(81040)); -const agent_base_1 = __nccwpck_require__(49690); -const debug = (0, debug_1.default)('http-proxy-agent'); -function isHTTPS(protocol) { - return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; -} -/** - * The `HttpProxyAgent` implements an HTTP Agent subclass that connects - * to the specified "HTTP proxy server" in order to proxy HTTP requests. - * - * @api public - */ -class HttpProxyAgent extends agent_base_1.Agent { - constructor(_opts) { - let opts; - if (typeof _opts === 'string') { - opts = url_1.default.parse(_opts); + while (index--) { + args[index] = arguments[index]; } - else { - opts = _opts; + if (isCurried) { + var placeholder = getHolder(wrapper), + holdersCount = countHolders(args, placeholder); } - if (!opts) { - throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + if (partials) { + args = composeArgs(args, partials, holders, isCurried); } - debug('Creating new HttpProxyAgent instance: %o', opts); - super(opts); - const proxy = Object.assign({}, opts); - // If `true`, then connect to the proxy server over TLS. - // Defaults to `false`. - this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); - // Prefer `hostname` over `host`, and set the `port` if needed. - proxy.host = proxy.hostname || proxy.host; - if (typeof proxy.port === 'string') { - proxy.port = parseInt(proxy.port, 10); + if (partialsRight) { + args = composeArgsRight(args, partialsRight, holdersRight, isCurried); } - if (!proxy.port && proxy.host) { - proxy.port = this.secureProxy ? 443 : 80; + length -= holdersCount; + if (isCurried && length < arity) { + var newHolders = replaceHolders(args, placeholder); + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, thisArg, + args, newHolders, argPos, ary, arity - length + ); } - if (proxy.host && proxy.path) { - // If both a `host` and `path` are specified then it's most likely - // the result of a `url.parse()` call... we need to remove the - // `path` portion so that `net.connect()` doesn't attempt to open - // that as a Unix socket file. - delete proxy.path; - delete proxy.pathname; + var thisBinding = isBind ? thisArg : this, + fn = isBindKey ? 
thisBinding[func] : func; + + length = args.length; + if (argPos) { + args = reorder(args, argPos); + } else if (isFlip && length > 1) { + args.reverse(); } - this.proxy = proxy; + if (isAry && ary < length) { + args.length = ary; + } + if (this && this !== root && this instanceof wrapper) { + fn = Ctor || createCtor(fn); + } + return fn.apply(thisBinding, args); + } + return wrapper; } + /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. + * Creates a function like `_.invertBy`. * - * @api protected + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} toIteratee The function to resolve iteratees. + * @returns {Function} Returns the new inverter function. */ - callback(req, opts) { - return __awaiter(this, void 0, void 0, function* () { - const { proxy, secureProxy } = this; - const parsed = url_1.default.parse(req.path); - if (!parsed.protocol) { - parsed.protocol = 'http:'; - } - if (!parsed.hostname) { - parsed.hostname = opts.hostname || opts.host || null; - } - if (parsed.port == null && typeof opts.port) { - parsed.port = String(opts.port); - } - if (parsed.port === '80') { - // if port is 80, then we can remove the port so that the - // ":80" portion is not on the produced URL - parsed.port = ''; - } - // Change the `http.ClientRequest` instance's "path" field - // to the absolute path of the URL that will be requested. - req.path = url_1.default.format(parsed); - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.auth) { - req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); - } - // Create a socket connection to the proxy server. - let socket; - if (secureProxy) { - debug('Creating `tls.Socket`: %o', proxy); - socket = tls_1.default.connect(proxy); - } - else { - debug('Creating `net.Socket`: %o', proxy); - socket = net_1.default.connect(proxy); - } - // At this point, the http ClientRequest's internal `_header` field - // might have already been set. If this is the case then we'll need - // to re-generate the string since we just changed the `req.path`. - if (req._header) { - let first; - let endOfHeaders; - debug('Regenerating stored HTTP header string for request'); - req._header = null; - req._implicitHeader(); - if (req.output && req.output.length > 0) { - // Node < 12 - debug('Patching connection write() output buffer with updated header'); - first = req.output[0]; - endOfHeaders = first.indexOf('\r\n\r\n') + 4; - req.output[0] = req._header + first.substring(endOfHeaders); - debug('Output buffer: %o', req.output); - } - else if (req.outputData && req.outputData.length > 0) { - // Node >= 12 - debug('Patching connection write() output buffer with updated header'); - first = req.outputData[0].data; - endOfHeaders = first.indexOf('\r\n\r\n') + 4; - req.outputData[0].data = - req._header + first.substring(endOfHeaders); - debug('Output buffer: %o', req.outputData[0].data); - } - } - // Wait for the socket's `connect` event, so that this `callback()` - // function throws instead of the `http` request machinery. This is - // important for i.e. `PacProxyAgent` which determines a failed proxy - // connection via the `callback()` function throwing. 
- yield (0, once_1.default)(socket, 'connect'); - return socket; - }); + function createInverter(setter, toIteratee) { + return function(object, iteratee) { + return baseInverter(object, setter, toIteratee(iteratee), {}); + }; } -} -exports["default"] = HttpProxyAgent; -//# sourceMappingURL=agent.js.map - -/***/ }), - -/***/ 23764: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { - -"use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const agent_1 = __importDefault(__nccwpck_require__(77492)); -function createHttpProxyAgent(opts) { - return new agent_1.default(opts); -} -(function (createHttpProxyAgent) { - createHttpProxyAgent.HttpProxyAgent = agent_1.default; - createHttpProxyAgent.prototype = agent_1.default.prototype; -})(createHttpProxyAgent || (createHttpProxyAgent = {})); -module.exports = createHttpProxyAgent; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 15098: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const net_1 = __importDefault(__nccwpck_require__(41808)); -const tls_1 = __importDefault(__nccwpck_require__(24404)); -const url_1 = __importDefault(__nccwpck_require__(57310)); -const assert_1 = __importDefault(__nccwpck_require__(39491)); -const debug_1 = __importDefault(__nccwpck_require__(38237)); -const agent_base_1 = __nccwpck_require__(49690); -const parse_proxy_response_1 = __importDefault(__nccwpck_require__(595)); -const debug = debug_1.default('https-proxy-agent:agent'); -/** - * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to - * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. - * - * Outgoing HTTP requests are first tunneled through the proxy server using the - * `CONNECT` HTTP request method to establish a connection to the proxy server, - * and then the proxy server connects to the destination target and issues the - * HTTP request from the proxy server. - * - * `https:` requests have their socket connection upgraded to TLS once - * the connection to the proxy server has been established. - * - * @api public - */ -class HttpsProxyAgent extends agent_base_1.Agent { - constructor(_opts) { - let opts; - if (typeof _opts === 'string') { - opts = url_1.default.parse(_opts); - } - else { - opts = _opts; - } - if (!opts) { - throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); - } - debug('creating new HttpsProxyAgent instance: %o', opts); - super(opts); - const proxy = Object.assign({}, opts); - // If `true`, then connect to the proxy server over TLS. 
- // Defaults to `false`. - this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); - // Prefer `hostname` over `host`, and set the `port` if needed. - proxy.host = proxy.hostname || proxy.host; - if (typeof proxy.port === 'string') { - proxy.port = parseInt(proxy.port, 10); - } - if (!proxy.port && proxy.host) { - proxy.port = this.secureProxy ? 443 : 80; + /** + * Creates a function that performs a mathematical operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @param {number} [defaultValue] The value used for `undefined` arguments. + * @returns {Function} Returns the new mathematical operation function. + */ + function createMathOperation(operator, defaultValue) { + return function(value, other) { + var result; + if (value === undefined && other === undefined) { + return defaultValue; } - // ALPN is supported by Node.js >= v5. - // attempt to negotiate http/1.1 for proxy servers that support http/2 - if (this.secureProxy && !('ALPNProtocols' in proxy)) { - proxy.ALPNProtocols = ['http 1.1']; + if (value !== undefined) { + result = value; } - if (proxy.host && proxy.path) { - // If both a `host` and `path` are specified then it's most likely - // the result of a `url.parse()` call... we need to remove the - // `path` portion so that `net.connect()` doesn't attempt to open - // that as a Unix socket file. - delete proxy.path; - delete proxy.pathname; + if (other !== undefined) { + if (result === undefined) { + return other; + } + if (typeof value == 'string' || typeof other == 'string') { + value = baseToString(value); + other = baseToString(other); + } else { + value = baseToNumber(value); + other = baseToNumber(other); + } + result = operator(value, other); } - this.proxy = proxy; + return result; + }; } + /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. + * Creates a function like `_.over`. * - * @api protected + * @private + * @param {Function} arrayFunc The function to iterate over iteratees. + * @returns {Function} Returns the new over function. */ - callback(req, opts) { - return __awaiter(this, void 0, void 0, function* () { - const { proxy, secureProxy } = this; - // Create a socket connection to the proxy server. - let socket; - if (secureProxy) { - debug('Creating `tls.Socket`: %o', proxy); - socket = tls_1.default.connect(proxy); - } - else { - debug('Creating `net.Socket`: %o', proxy); - socket = net_1.default.connect(proxy); - } - const headers = Object.assign({}, proxy.headers); - const hostname = `${opts.host}:${opts.port}`; - let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.auth) { - headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; - } - // The `Host` header should only include the port - // number when it is not the default port. - let { host, port, secureEndpoint } = opts; - if (!isDefaultPort(port, secureEndpoint)) { - host += `:${port}`; - } - headers.Host = host; - headers.Connection = 'close'; - for (const name of Object.keys(headers)) { - payload += `${name}: ${headers[name]}\r\n`; - } - const proxyResponsePromise = parse_proxy_response_1.default(socket); - socket.write(`${payload}\r\n`); - const { statusCode, buffered } = yield proxyResponsePromise; - if (statusCode === 200) { - req.once('socket', resume); - if (opts.secureEndpoint) { - // The proxy is connecting to a TLS server, so upgrade - // this socket connection to a TLS connection. 
- debug('Upgrading socket connection to TLS'); - const servername = opts.servername || opts.host; - return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, - servername })); - } - return socket; - } - // Some other status code that's not 200... need to re-play the HTTP - // header "data" events onto the socket once the HTTP machinery is - // attached so that the node core `http` can parse and handle the - // error status code. - // Close the original socket, and a new "fake" socket is returned - // instead, so that the proxy doesn't get the HTTP request - // written to it (which may contain `Authorization` headers or other - // sensitive data). - // - // See: https://hackerone.com/reports/541502 - socket.destroy(); - const fakeSocket = new net_1.default.Socket({ writable: false }); - fakeSocket.readable = true; - // Need to wait for the "socket" event to re-play the "data" events. - req.once('socket', (s) => { - debug('replaying proxy buffer for failed request'); - assert_1.default(s.listenerCount('data') > 0); - // Replay the "buffered" Buffer onto the fake `socket`, since at - // this point the HTTP module machinery has been hooked up for - // the user. - s.push(buffered); - s.push(null); - }); - return fakeSocket; + function createOver(arrayFunc) { + return flatRest(function(iteratees) { + iteratees = arrayMap(iteratees, baseUnary(getIteratee())); + return baseRest(function(args) { + var thisArg = this; + return arrayFunc(iteratees, function(iteratee) { + return apply(iteratee, thisArg, args); + }); }); + }); } -} -exports["default"] = HttpsProxyAgent; -function resume(socket) { - socket.resume(); -} -function isDefaultPort(port, secure) { - return Boolean((!secure && port === 80) || (secure && port === 443)); -} -function isHTTPS(protocol) { - return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; -} -function omit(obj, ...keys) { - const ret = {}; - let key; - for (key in obj) { - if (!keys.includes(key)) { - ret[key] = obj[key]; - } - } - return ret; -} -//# sourceMappingURL=agent.js.map - -/***/ }), - -/***/ 77219: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { - -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const agent_1 = __importDefault(__nccwpck_require__(15098)); -function createHttpsProxyAgent(opts) { - return new agent_1.default(opts); -} -(function (createHttpsProxyAgent) { - createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; - createHttpsProxyAgent.prototype = agent_1.default.prototype; -})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); -module.exports = createHttpsProxyAgent; -//# sourceMappingURL=index.js.map + /** + * Creates the padding for `string` based on `length`. The `chars` string + * is truncated if the number of characters exceeds `length`. + * + * @private + * @param {number} length The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padding for `string`. + */ + function createPadding(length, chars) { + chars = chars === undefined ? ' ' : baseToString(chars); -/***/ }), + var charsLength = chars.length; + if (charsLength < 2) { + return charsLength ? baseRepeat(chars, length) : chars; + } + var result = baseRepeat(chars, nativeCeil(length / stringSize(chars))); + return hasUnicode(chars) + ? 
castSlice(stringToArray(result), 0, length).join('') + : result.slice(0, length); + } -/***/ 595: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + /** + * Creates a function that wraps `func` to invoke it with the `this` binding + * of `thisArg` and `partials` prepended to the arguments it receives. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} partials The arguments to prepend to those provided to + * the new function. + * @returns {Function} Returns the new wrapped function. + */ + function createPartial(func, bitmask, thisArg, partials) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); -"use strict"; + function wrapper() { + var argsIndex = -1, + argsLength = arguments.length, + leftIndex = -1, + leftLength = partials.length, + args = Array(leftLength + argsLength), + fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const debug_1 = __importDefault(__nccwpck_require__(38237)); -const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); -function parseProxyResponse(socket) { - return new Promise((resolve, reject) => { - // we need to buffer any HTTP traffic that happens with the proxy before we get - // the CONNECT response, so that if the response is anything other than an "200" - // response code, then we can re-play the "data" events on the socket once the - // HTTP parser is hooked up... - let buffersLength = 0; - const buffers = []; - function read() { - const b = socket.read(); - if (b) - ondata(b); - else - socket.once('readable', read); - } - function cleanup() { - socket.removeListener('end', onend); - socket.removeListener('error', onerror); - socket.removeListener('close', onclose); - socket.removeListener('readable', read); - } - function onclose(err) { - debug('onclose had error %o', err); + while (++leftIndex < leftLength) { + args[leftIndex] = partials[leftIndex]; } - function onend() { - debug('onend'); + while (argsLength--) { + args[leftIndex++] = arguments[++argsIndex]; } - function onerror(err) { - cleanup(); - debug('onerror %o', err); - reject(err); + return apply(fn, isBind ? thisArg : this, args); + } + return wrapper; + } + + /** + * Creates a `_.range` or `_.rangeRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new range function. + */ + function createRange(fromRight) { + return function(start, end, step) { + if (step && typeof step != 'number' && isIterateeCall(start, end, step)) { + end = step = undefined; } - function ondata(b) { - buffers.push(b); - buffersLength += b.length; - const buffered = Buffer.concat(buffers, buffersLength); - const endOfHeaders = buffered.indexOf('\r\n\r\n'); - if (endOfHeaders === -1) { - // keep buffering - debug('have not received end of HTTP headers yet...'); - read(); - return; - } - const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); - const statusCode = +firstLine.split(' ')[1]; - debug('got proxy server response: %o', firstLine); - resolve({ - statusCode, - buffered - }); + // Ensure the sign of `-0` is preserved. 
+ start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); } - socket.on('error', onerror); - socket.on('close', onclose); - socket.on('end', onend); - read(); - }); -} -exports["default"] = parseProxyResponse; -//# sourceMappingURL=parse-proxy-response.js.map - -/***/ }), - -/***/ 98768: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const fs = __nccwpck_require__(57147); - -let isDocker; - -function hasDockerEnv() { - try { - fs.statSync('/.dockerenv'); - return true; - } catch (_) { - return false; - } -} - -function hasDockerCGroup() { - try { - return fs.readFileSync('/proc/self/cgroup', 'utf8').includes('docker'); - } catch (_) { - return false; - } -} - -module.exports = () => { - if (isDocker === undefined) { - isDocker = hasDockerEnv() || hasDockerCGroup(); - } - - return isDocker; -}; - - -/***/ }), - -/***/ 52559: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const os = __nccwpck_require__(22037); -const fs = __nccwpck_require__(57147); -const isDocker = __nccwpck_require__(98768); - -const isWsl = () => { - if (process.platform !== 'linux') { - return false; - } - - if (os.release().toLowerCase().includes('microsoft')) { - if (isDocker()) { - return false; - } - - return true; - } - - try { - return fs.readFileSync('/proc/version', 'utf8').toLowerCase().includes('microsoft') ? - !isDocker() : false; - } catch (_) { - return false; - } -}; - -if (process.env.__IS_WSL_TEST__) { - module.exports = isWsl; -} else { - module.exports = isWsl(); -} - - -/***/ }), - -/***/ 51778: -/***/ ((module) => { - -"use strict"; -class JSBI extends Array{constructor(i,_){if(super(i),this.sign=_,i>JSBI.__kMaxLength)throw new RangeError("Maximum BigInt size exceeded")}static BigInt(i){var _=Math.floor,t=Number.isFinite;if("number"==typeof i){if(0===i)return JSBI.__zero();if(JSBI.__isOneDigitInt(i))return 0>i?JSBI.__oneDigit(-i,!0):JSBI.__oneDigit(i,!1);if(!t(i)||_(i)!==i)throw new RangeError("The number "+i+" cannot be converted to BigInt because it is not an integer");return JSBI.__fromDouble(i)}if("string"==typeof i){const _=JSBI.__fromString(i);if(null===_)throw new SyntaxError("Cannot convert "+i+" to a BigInt");return _}if("boolean"==typeof i)return!0===i?JSBI.__oneDigit(1,!1):JSBI.__zero();if("object"==typeof i){if(i.constructor===JSBI)return i;const _=JSBI.__toPrimitive(i);return JSBI.BigInt(_)}throw new TypeError("Cannot convert "+i+" to a BigInt")}toDebugString(){const i=["BigInt["];for(const _ of this)i.push((_?(_>>>0).toString(16):_)+", ");return i.push("]"),i.join("")}toString(i=10){if(2>i||36>>=12;const a=l-12;let u=12<=l?0:o<<20+l,d=20+l;for(0>>30-a,u=o<>>30-d,d-=30;const h=JSBI.__decideRounding(i,d,s,o);if((1===h||0===h&&1==(1&u))&&(u=u+1>>>0,0===u&&(r++,0!=r>>>20&&(r=0,g++,1023=JSBI.__kMaxLengthBits)throw new RangeError("BigInt too big");if(1===i.length&&2===i.__digit(0)){const _=1+(0|t/30),e=i.sign&&0!=(1&t),n=new JSBI(_,e);n.__initializeDigits();const g=1<>=1;0!==t;t>>=1)n=JSBI.multiply(n,n),0!=(1&t)&&(null===e?e=n:e=JSBI.multiply(e,n));return e}static multiply(_,t){if(0===_.length)return _;if(0===t.length)return t;let i=_.length+t.length;30<=_.__clzmsd()+t.__clzmsd()&&i--;const e=new JSBI(i,_.sign!==t.sign);e.__initializeDigits();for(let n=0;n<_.length;n++)JSBI.__multiplyAccumulate(t,_.__digit(n),e,n);return e.__trim()}static divide(i,_){if(0===_.length)throw new RangeError("Division by zero");if(0>JSBI.__absoluteCompare(i,_))return 
JSBI.__zero();const t=i.sign!==_.sign,e=_.__unsignedDigit(0);let n;if(1===_.length&&32767>=e){if(1===e)return t===i.sign?i:JSBI.unaryMinus(i);n=JSBI.__absoluteDivSmall(i,e,null)}else n=JSBI.__absoluteDivLarge(i,_,!0,!1);return n.sign=t,n.__trim()}static remainder(i,_){if(0===_.length)throw new RangeError("Division by zero");if(0>JSBI.__absoluteCompare(i,_))return i;const t=_.__unsignedDigit(0);if(1===_.length&&32767>=t){if(1===t)return JSBI.__zero();const _=JSBI.__absoluteModSmall(i,t);return 0===_?JSBI.__zero():JSBI.__oneDigit(_,i.sign)}const e=JSBI.__absoluteDivLarge(i,_,!1,!0);return e.sign=i.sign,e.__trim()}static add(i,_){const t=i.sign;return t===_.sign?JSBI.__absoluteAdd(i,_,t):0<=JSBI.__absoluteCompare(i,_)?JSBI.__absoluteSub(i,_,t):JSBI.__absoluteSub(_,i,!t)}static subtract(i,_){const t=i.sign;return t===_.sign?0<=JSBI.__absoluteCompare(i,_)?JSBI.__absoluteSub(i,_,t):JSBI.__absoluteSub(_,i,!t):JSBI.__absoluteAdd(i,_,t)}static leftShift(i,_){return 0===_.length||0===i.length?i:_.sign?JSBI.__rightShiftByAbsolute(i,_):JSBI.__leftShiftByAbsolute(i,_)}static signedRightShift(i,_){return 0===_.length||0===i.length?i:_.sign?JSBI.__leftShiftByAbsolute(i,_):JSBI.__rightShiftByAbsolute(i,_)}static unsignedRightShift(){throw new TypeError("BigInts have no unsigned right shift; use >> instead")}static lessThan(i,_){return 0>JSBI.__compareToBigInt(i,_)}static lessThanOrEqual(i,_){return 0>=JSBI.__compareToBigInt(i,_)}static greaterThan(i,_){return 0_)throw new RangeError("Invalid value: not (convertible to) a safe integer");if(0===_)return JSBI.__zero();if(_>=JSBI.__kMaxLengthBits)return t;const e=0|(_+29)/30;if(t.lengthi)throw new RangeError("Invalid value: not (convertible to) a safe integer");if(0===i)return JSBI.__zero();if(_.sign){if(i>JSBI.__kMaxLengthBits)throw new RangeError("BigInt too big");return JSBI.__truncateAndSubFromPowerOfTwo(i,_,!1)}if(i>=JSBI.__kMaxLengthBits)return _;const e=0|(i+29)/30;if(_.length>>g)return _}return JSBI.__truncateToNBits(i,_)}static ADD(i,_){if(i=JSBI.__toPrimitive(i),_=JSBI.__toPrimitive(_),"string"==typeof i)return"string"!=typeof _&&(_=_.toString()),i+_;if("string"==typeof _)return i.toString()+_;if(i=JSBI.__toNumeric(i),_=JSBI.__toNumeric(_),JSBI.__isBigInt(i)&&JSBI.__isBigInt(_))return JSBI.add(i,_);if("number"==typeof i&&"number"==typeof _)return i+_;throw new TypeError("Cannot mix BigInt and other types, use explicit conversions")}static LT(i,_){return JSBI.__compare(i,_,0)}static LE(i,_){return JSBI.__compare(i,_,1)}static GT(i,_){return JSBI.__compare(i,_,2)}static GE(i,_){return JSBI.__compare(i,_,3)}static EQ(i,_){for(;;){if(JSBI.__isBigInt(i))return JSBI.__isBigInt(_)?JSBI.equal(i,_):JSBI.EQ(_,i);if("number"==typeof i){if(JSBI.__isBigInt(_))return JSBI.__equalToNumber(_,i);if("object"!=typeof _)return i==_;_=JSBI.__toPrimitive(_)}else if("string"==typeof i){if(JSBI.__isBigInt(_))return i=JSBI.__fromString(i),null!==i&&JSBI.equal(i,_);if("object"!=typeof _)return i==_;_=JSBI.__toPrimitive(_)}else if("boolean"==typeof i){if(JSBI.__isBigInt(_))return JSBI.__equalToNumber(_,+i);if("object"!=typeof _)return i==_;_=JSBI.__toPrimitive(_)}else if("symbol"==typeof i){if(JSBI.__isBigInt(_))return!1;if("object"!=typeof _)return i==_;_=JSBI.__toPrimitive(_)}else if("object"==typeof i){if("object"==typeof _&&_.constructor!==JSBI)return i==_;i=JSBI.__toPrimitive(i)}else return i==_}}static NE(i,_){return!JSBI.EQ(i,_)}static __zero(){return new JSBI(0,!1)}static __oneDigit(i,_){const t=new JSBI(1,_);return t.__setDigit(0,i),t}__copy(){const _=new 
JSBI(this.length,this.sign);for(let t=0;t_)n=-_-1;else{if(0===t)return-1;t--,e=i.__digit(t),n=29}let g=1<>>20,t=_-1023,e=(0|t/30)+1,n=new JSBI(e,0>i);let g=1048575&JSBI.__kBitConversionInts[1]|1048576,o=JSBI.__kBitConversionInts[0];const s=20,l=t%30;let r,a=0;if(l<20){const i=s-l;a=i+32,r=g>>>i,g=g<<32-i|o>>>i,o<<=32-i}else if(l===20)a=32,r=g,g=o,o=0;else{const i=l-s;a=32-i,r=g<>>32-i,g=o<>>2,g=g<<30|o>>>2,o<<=30):r=0,n.__setDigit(_,r);return n.__trim()}static __isWhitespace(i){return!!(13>=i&&9<=i)||(159>=i?32==i:131071>=i?160==i||5760==i:196607>=i?(i&=131071,10>=i||40==i||41==i||47==i||95==i||4096==i):65279==i)}static __fromString(i,_=0){let t=0;const e=i.length;let n=0;if(n===e)return JSBI.__zero();let g=i.charCodeAt(n);for(;JSBI.__isWhitespace(g);){if(++n===e)return JSBI.__zero();g=i.charCodeAt(n)}if(43===g){if(++n===e)return null;g=i.charCodeAt(n),t=1}else if(45===g){if(++n===e)return null;g=i.charCodeAt(n),t=-1}if(0===_){if(_=10,48===g){if(++n===e)return JSBI.__zero();if(g=i.charCodeAt(n),88===g||120===g){if(_=16,++n===e)return null;g=i.charCodeAt(n)}else if(79===g||111===g){if(_=8,++n===e)return null;g=i.charCodeAt(n)}else if(66===g||98===g){if(_=2,++n===e)return null;g=i.charCodeAt(n)}}}else if(16===_&&48===g){if(++n===e)return JSBI.__zero();if(g=i.charCodeAt(n),88===g||120===g){if(++n===e)return null;g=i.charCodeAt(n)}}if(0!=t&&10!==_)return null;for(;48===g;){if(++n===e)return JSBI.__zero();g=i.charCodeAt(n)}const o=e-n;let s=JSBI.__kMaxBitsPerChar[_],l=JSBI.__kBitsPerCharTableMultiplier-1;if(o>1073741824/s)return null;const r=s*o+l>>>JSBI.__kBitsPerCharTableShift,a=new JSBI(0|(r+29)/30,!1),u=10>_?_:10,h=10<_?_-10:0;if(0==(_&_-1)){s>>=JSBI.__kBitsPerCharTableShift;const _=[],t=[];let o=!1;do{let l=0,r=0;for(;;){let _;if(g-48>>>0>>0>>0>>0>>JSBI.__kBitsPerCharTableShift)/30;a.__inplaceMultiplyAdd(b,r,D)}while(!t)}if(n!==e){if(!JSBI.__isWhitespace(g))return null;for(n++;n>>l-o)}if(0!==g){if(n>=_.length)throw new Error("implementation bug");_.__setDigit(n++,g)}for(;n<_.length;n++)_.__setDigit(n,0)}static __toStringBasePowerOfTwo(_,i){const t=_.length;let e=i-1;e=(85&e>>>1)+(85&e),e=(51&e>>>2)+(51&e),e=(15&e>>>4)+(15&e);const n=e,g=i-1,o=_.__digit(t-1),s=JSBI.__clz30(o);let l=0|(30*t-s+n-1)/n;if(_.sign&&l++,268435456>>o,d=30-o;d>=n;)r[a--]=JSBI.__kConversionChars[u&g],u>>>=n,d-=n}const h=(u|o<>>n-d;0!==u;)r[a--]=JSBI.__kConversionChars[u&g],u>>>=n;if(_.sign&&(r[a--]="-"),-1!=a)throw new Error("implementation bug");return r.join("")}static __toStringGeneric(_,i,t){const e=_.length;if(0===e)return"";if(1===e){let e=_.__unsignedDigit(0).toString(i);return!1===t&&_.sign&&(e="-"+e),e}const n=30*e-JSBI.__clz30(_.__digit(e-1)),g=JSBI.__kMaxBitsPerChar[i],o=g-1;let s=n*JSBI.__kBitsPerCharTableMultiplier;s+=o-1,s=0|s/o;const l=s+1>>1,r=JSBI.exponentiate(JSBI.__oneDigit(i,!1),JSBI.__oneDigit(l,!1));let a,u;const d=r.__unsignedDigit(0);if(1===r.length&&32767>=d){a=new JSBI(_.length,!1),a.__initializeDigits();let t=0;for(let e=2*_.length-1;0<=e;e--){const i=t<<15|_.__halfDigit(e);a.__setHalfDigit(e,0|i/d),t=0|i%d}u=t.toString(i)}else{const t=JSBI.__absoluteDivLarge(_,r,!0,!0);a=t.quotient;const e=t.remainder.__trim();u=JSBI.__toStringGeneric(e,i,!0)}a.__trim();let h=JSBI.__toStringGeneric(a,i,!0);for(;u.lengthe?JSBI.__absoluteLess(t):0}static __compareToNumber(i,_){if(JSBI.__isOneDigitInt(_)){const t=i.sign,e=0>_;if(t!==e)return JSBI.__unequalSign(t);if(0===i.length){if(e)throw new Error("implementation bug");return 0===_?0:-1}if(1n?JSBI.__absoluteGreater(t):g_)return 
JSBI.__unequalSign(t);if(0===_)throw new Error("implementation bug: should be handled elsewhere");if(0===i.length)return-1;JSBI.__kBitConversionDouble[0]=_;const e=2047&JSBI.__kBitConversionInts[1]>>>20;if(2047==e)throw new Error("implementation bug: handled elsewhere");const n=e-1023;if(0>n)return JSBI.__absoluteGreater(t);const g=i.length;let o=i.__digit(g-1);const s=JSBI.__clz30(o),l=30*g-s,r=n+1;if(lr)return JSBI.__absoluteGreater(t);let a=1048576|1048575&JSBI.__kBitConversionInts[1],u=JSBI.__kBitConversionInts[0];const d=20,h=29-s;if(h!==(0|(l-1)%30))throw new Error("implementation bug");let m,b=0;if(20>h){const i=d-h;b=i+32,m=a>>>i,a=a<<32-i|u>>>i,u<<=32-i}else if(20===h)b=32,m=a,a=u,u=0;else{const i=h-d;b=32-i,m=a<>>32-i,a=u<>>=0,m>>>=0,o>m)return JSBI.__absoluteGreater(t);if(o>>2,a=a<<30|u>>>2,u<<=30):m=0;const _=i.__unsignedDigit(e);if(_>m)return JSBI.__absoluteGreater(t);if(__&&i.__unsignedDigit(0)===t(_):0===JSBI.__compareToDouble(i,_)}static __comparisonResultToBool(i,_){return 0===_?0>i:1===_?0>=i:2===_?0_;case 3:return i>=_;}if(JSBI.__isBigInt(i)&&"string"==typeof _)return _=JSBI.__fromString(_),null!==_&&JSBI.__comparisonResultToBool(JSBI.__compareToBigInt(i,_),t);if("string"==typeof i&&JSBI.__isBigInt(_))return i=JSBI.__fromString(i),null!==i&&JSBI.__comparisonResultToBool(JSBI.__compareToBigInt(i,_),t);if(i=JSBI.__toNumeric(i),_=JSBI.__toNumeric(_),JSBI.__isBigInt(i)){if(JSBI.__isBigInt(_))return JSBI.__comparisonResultToBool(JSBI.__compareToBigInt(i,_),t);if("number"!=typeof _)throw new Error("implementation bug");return JSBI.__comparisonResultToBool(JSBI.__compareToNumber(i,_),t)}if("number"!=typeof i)throw new Error("implementation bug");if(JSBI.__isBigInt(_))return JSBI.__comparisonResultToBool(JSBI.__compareToNumber(_,i),2^t);if("number"!=typeof _)throw new Error("implementation bug");return 0===t?i<_:1===t?i<=_:2===t?i>_:3===t?i>=_:void 0}__clzmsd(){return JSBI.__clz30(this.__digit(this.length-1))}static __absoluteAdd(_,t,e){if(_.length>>30,g.__setDigit(s,1073741823&i)}for(;s<_.length;s++){const i=_.__digit(s)+o;o=i>>>30,g.__setDigit(s,1073741823&i)}return s>>30,n.__setDigit(o,1073741823&i)}for(;o<_.length;o++){const i=_.__digit(o)-g;g=1&i>>>30,n.__setDigit(o,1073741823&i)}return n.__trim()}static __absoluteAddOne(_,i,t=null){const e=_.length;null===t?t=new JSBI(e,i):t.sign=i;let n=1;for(let g=0;g>>30,t.__setDigit(g,1073741823&i)}return 0!=n&&t.__setDigitGrow(e,1),t}static __absoluteSubOne(_,t){const e=_.length;t=t||e;const n=new JSBI(t,!1);let g=1;for(let o=0;o>>30,n.__setDigit(o,1073741823&i)}if(0!=g)throw new Error("implementation bug");for(let g=e;gn?0:_.__unsignedDigit(n)>t.__unsignedDigit(n)?1:-1}static __multiplyAccumulate(_,t,e,n){if(0===t)return;const g=32767&t,o=t>>>15;let s=0,l=0;for(let r,a=0;a<_.length;a++,n++){r=e.__digit(n);const i=_.__digit(a),t=32767&i,u=i>>>15,d=JSBI.__imul(t,g),h=JSBI.__imul(t,o),m=JSBI.__imul(u,g),b=JSBI.__imul(u,o);r+=l+d+s,s=r>>>30,r&=1073741823,r+=((32767&h)<<15)+((32767&m)<<15),s+=r>>>30,l=b+(h>>>15)+(m>>>15),e.__setDigit(n,1073741823&r)}for(;0!=s||0!==l;n++){let i=e.__digit(n);i+=s+l,l=0,s=i>>>30,e.__setDigit(n,1073741823&i)}}static __internalMultiplyAdd(_,t,e,g,o){let s=e,l=0;for(let n=0;n>>15,t),a=e+((32767&g)<<15)+l+s;s=a>>>30,l=g>>>15,o.__setDigit(n,1073741823&a)}if(o.length>g)for(o.__setDigit(g++,s+l);gthis.length&&(t=this.length);const e=32767&i,n=i>>>15;let g=0,o=_;for(let s=0;s>>15,l=JSBI.__imul(_,e),r=JSBI.__imul(_,n),a=JSBI.__imul(t,e),u=JSBI.__imul(t,n);let 
d=o+l+g;g=d>>>30,d&=1073741823,d+=((32767&r)<<15)+((32767&a)<<15),g+=d>>>30,o=u+(r>>>15)+(a>>>15),this.__setDigit(s,1073741823&d)}if(0!=g||0!==o)throw new Error("implementation bug")}static __absoluteDivSmall(_,t,e=null){null===e&&(e=new JSBI(_.length,!1));let n=0;for(let g,o=2*_.length-1;0<=o;o-=2){g=(n<<15|_.__halfDigit(o))>>>0;const i=0|g/t;n=0|g%t,g=(n<<15|_.__halfDigit(o-1))>>>0;const s=0|g/t;n=0|g%t,e.__setDigit(o>>>1,i<<15|s)}return e}static __absoluteModSmall(_,t){let e=0;for(let n=2*_.length-1;0<=n;n--){const i=(e<<15|_.__halfDigit(n))>>>0;e=0|i%t}return e}static __absoluteDivLarge(i,_,t,e){const g=_.__halfDigitLength(),n=_.length,o=i.__halfDigitLength()-g;let s=null;t&&(s=new JSBI(o+2>>>1,!1),s.__initializeDigits());const l=new JSBI(g+2>>>1,!1);l.__initializeDigits();const r=JSBI.__clz15(_.__halfDigit(g-1));0>>0;r=0|t/u;let e=0|t%u;const n=_.__halfDigit(g-2),o=a.__halfDigit(h+g-2);for(;JSBI.__imul(r,n)>>>0>(e<<16|o)>>>0&&(r--,e+=u,!(32767>>1,d|r))}if(e)return a.__inplaceRightShift(r),t?{quotient:s,remainder:a}:a;if(t)return s;throw new Error("unreachable")}static __clz15(i){return JSBI.__clz30(i)-15}__inplaceAdd(_,t,e){let n=0;for(let g=0;g>>15,this.__setHalfDigit(t+g,32767&i)}return n}__inplaceSub(_,t,e){let n=0;if(1&t){t>>=1;let g=this.__digit(t),o=32767&g,s=0;for(;s>>1;s++){const i=_.__digit(s),e=(g>>>15)-(32767&i)-n;n=1&e>>>15,this.__setDigit(t+s,(32767&e)<<15|32767&o),g=this.__digit(t+s+1),o=(32767&g)-(i>>>15)-n,n=1&o>>>15}const i=_.__digit(s),l=(g>>>15)-(32767&i)-n;n=1&l>>>15,this.__setDigit(t+s,(32767&l)<<15|32767&o);if(t+s+1>=this.length)throw new RangeError("out of bounds");0==(1&e)&&(g=this.__digit(t+s+1),o=(32767&g)-(i>>>15)-n,n=1&o>>>15,this.__setDigit(t+_.length,1073709056&g|32767&o))}else{t>>=1;let g=0;for(;g<_.length-1;g++){const i=this.__digit(t+g),e=_.__digit(g),o=(32767&i)-(32767&e)-n;n=1&o>>>15;const s=(i>>>15)-(e>>>15)-n;n=1&s>>>15,this.__setDigit(t+g,(32767&s)<<15|32767&o)}const i=this.__digit(t+g),o=_.__digit(g),s=(32767&i)-(32767&o)-n;n=1&s>>>15;let l=0;0==(1&e)&&(l=(i>>>15)-(o>>>15)-n,n=1&l>>>15),this.__setDigit(t+g,(32767&l)<<15|32767&s)}return n}__inplaceRightShift(_){if(0===_)return;let t=this.__digit(0)>>>_;const e=this.length-1;for(let n=0;n>>_}this.__setDigit(e,t)}static __specialLeftShift(_,t,e){const g=_.length,n=new JSBI(g+e,!1);if(0===t){for(let t=0;t>>30-t}return 0t)throw new RangeError("BigInt too big");const e=0|t/30,n=t%30,g=_.length,o=0!==n&&0!=_.__digit(g-1)>>>30-n,s=g+e+(o?1:0),l=new JSBI(s,_.sign);if(0===n){let t=0;for(;t>>30-n}if(o)l.__setDigit(g+e,t);else if(0!==t)throw new Error("implementation bug")}return l.__trim()}static __rightShiftByAbsolute(_,i){const t=_.length,e=_.sign,n=JSBI.__toShiftAmount(i);if(0>n)return JSBI.__rightShiftByMaximum(e);const g=0|n/30,o=n%30;let s=t-g;if(0>=s)return JSBI.__rightShiftByMaximum(e);let l=!1;if(e){if(0!=(_.__digit(g)&(1<>>o;const n=t-g-1;for(let t=0;t>>o}r.__setDigit(n,e)}return l&&(r=JSBI.__absoluteAddOne(r,!0,r)),r.__trim()}static __rightShiftByMaximum(i){return i?JSBI.__oneDigit(1,!0):JSBI.__zero()}static __toShiftAmount(i){if(1JSBI.__kMaxLengthBits?-1:_}static __toPrimitive(i,_="default"){if("object"!=typeof i)return i;if(i.constructor===JSBI)return i;if("undefined"!=typeof Symbol&&"symbol"==typeof Symbol.toPrimitive){const t=i[Symbol.toPrimitive];if(t){const i=t(_);if("object"!=typeof i)return i;throw new TypeError("Cannot convert object to primitive value")}}const t=i.valueOf;if(t){const _=t.call(i);if("object"!=typeof _)return _}const e=i.toString;if(e){const 
_=e.call(i);if("object"!=typeof _)return _}throw new TypeError("Cannot convert object to primitive value")}static __toNumeric(i){return JSBI.__isBigInt(i)?i:+i}static __isBigInt(i){return"object"==typeof i&&null!==i&&i.constructor===JSBI}static __truncateToNBits(i,_){const t=0|(i+29)/30,e=new JSBI(t,_.sign),n=t-1;for(let t=0;t>>_}return e.__setDigit(n,g),e.__trim()}static __truncateAndSubFromPowerOfTwo(_,t,e){var n=Math.min;const g=0|(_+29)/30,o=new JSBI(g,e);let s=0;const l=g-1;let a=0;for(const i=n(l,t.length);s>>30,o.__setDigit(s,1073741823&i)}for(;s>>i;const _=1<<32-i;h=_-u-a,h&=_-1}return o.__setDigit(l,h),o.__trim()}__digit(_){return this[_]}__unsignedDigit(_){return this[_]>>>0}__setDigit(_,i){this[_]=0|i}__setDigitGrow(_,i){this[_]=0|i}__halfDigitLength(){const i=this.length;return 32767>=this.__unsignedDigit(i-1)?2*i-1:2*i}__halfDigit(_){return 32767&this[_>>>1]>>>15*(1&_)}__setHalfDigit(_,i){const t=_>>>1,e=this.__digit(t),n=1&_?32767&e|i<<15:1073709056&e|32767&i;this.__setDigit(t,n)}static __digitPow(i,_){let t=1;for(;0<_;)1&_&&(t*=i),_>>>=1,i*=i;return t}static __isOneDigitInt(i){return(1073741823&i)===i}}JSBI.__kMaxLength=33554432,JSBI.__kMaxLengthBits=JSBI.__kMaxLength<<5,JSBI.__kMaxBitsPerChar=[0,0,32,51,64,75,83,90,96,102,107,111,115,119,122,126,128,131,134,136,139,141,143,145,147,149,151,153,154,156,158,159,160,162,163,165,166],JSBI.__kBitsPerCharTableShift=5,JSBI.__kBitsPerCharTableMultiplier=1<>>0)/Math.LN2)},JSBI.__imul=Math.imul||function(i,_){return 0|i*_},module.exports=JSBI; -//# sourceMappingURL=jsbi-cjs.js.map - - -/***/ }), + step = step === undefined ? (start < end ? 1 : -1) : toFinite(step); + return baseRange(start, end, step, fromRight); + }; + } -/***/ 53359: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Creates a function that performs a relational operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @returns {Function} Returns the new relational operation function. + */ + function createRelationalOperation(operator) { + return function(value, other) { + if (!(typeof value == 'string' && typeof other == 'string')) { + value = toNumber(value); + other = toNumber(other); + } + return operator(value, other); + }; + } -var jws = __nccwpck_require__(22597); + /** + * Creates a function that wraps `func` to continue currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {Function} wrapFunc The function to create the `func` wrapper. + * @param {*} placeholder The placeholder value. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createRecurry(func, bitmask, wrapFunc, placeholder, thisArg, partials, holders, argPos, ary, arity) { + var isCurry = bitmask & WRAP_CURRY_FLAG, + newHolders = isCurry ? holders : undefined, + newHoldersRight = isCurry ? undefined : holders, + newPartials = isCurry ? partials : undefined, + newPartialsRight = isCurry ? 
undefined : partials; -module.exports = function (jwt, options) { - options = options || {}; - var decoded = jws.decode(jwt, options); - if (!decoded) { return null; } - var payload = decoded.payload; + bitmask |= (isCurry ? WRAP_PARTIAL_FLAG : WRAP_PARTIAL_RIGHT_FLAG); + bitmask &= ~(isCurry ? WRAP_PARTIAL_RIGHT_FLAG : WRAP_PARTIAL_FLAG); - //try parse the payload - if(typeof payload === 'string') { - try { - var obj = JSON.parse(payload); - if(obj !== null && typeof obj === 'object') { - payload = obj; + if (!(bitmask & WRAP_CURRY_BOUND_FLAG)) { + bitmask &= ~(WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG); } - } catch (e) { } - } - - //return header if `complete` option is enabled. header includes claims - //such as `kid` and `alg` used to select the key within a JWKS needed to - //verify the signature - if (options.complete === true) { - return { - header: decoded.header, - payload: payload, - signature: decoded.signature - }; - } - return payload; -}; + var newData = [ + func, bitmask, thisArg, newPartials, newHolders, newPartialsRight, + newHoldersRight, argPos, ary, arity + ]; + var result = wrapFunc.apply(undefined, newData); + if (isLaziable(func)) { + setData(result, newData); + } + result.placeholder = placeholder; + return setWrapToString(result, func, bitmask); + } -/***/ }), + /** + * Creates a function like `_.round`. + * + * @private + * @param {string} methodName The name of the `Math` method to use when rounding. + * @returns {Function} Returns the new round function. + */ + function createRound(methodName) { + var func = Math[methodName]; + return function(number, precision) { + number = toNumber(number); + precision = precision == null ? 0 : nativeMin(toInteger(precision), 292); + if (precision && nativeIsFinite(number)) { + // Shift with exponential notation to avoid floating-point issues. + // See [MDN](https://mdn.io/round#Examples) for more details. + var pair = (toString(number) + 'e').split('e'), + value = func(pair[0] + 'e' + (+pair[1] + precision)); -/***/ 77486: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + pair = (toString(value) + 'e').split('e'); + return +(pair[0] + 'e' + (+pair[1] - precision)); + } + return func(number); + }; + } -module.exports = { - verify: __nccwpck_require__(12327), - sign: __nccwpck_require__(82022), - JsonWebTokenError: __nccwpck_require__(405), - NotBeforeError: __nccwpck_require__(4383), - TokenExpiredError: __nccwpck_require__(46637), -}; + /** + * Creates a set object of `values`. + * + * @private + * @param {Array} values The values to add to the set. + * @returns {Object} Returns the new set. + */ + var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { + return new Set(values); + }; -Object.defineProperty(module.exports, "decode", ({ - enumerable: false, - value: __nccwpck_require__(53359), -})); + /** + * Creates a `_.toPairs` or `_.toPairsIn` function. + * + * @private + * @param {Function} keysFunc The function to get the keys of a given object. + * @returns {Function} Returns the new pairs function. + */ + function createToPairs(keysFunc) { + return function(object) { + var tag = getTag(object); + if (tag == mapTag) { + return mapToArray(object); + } + if (tag == setTag) { + return setToPairs(object); + } + return baseToPairs(object, keysFunc(object)); + }; + } + /** + * Creates a function that either curries or invokes `func` with optional + * `this` binding and partially applied arguments. 
+ * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. + * 1 - `_.bind` + * 2 - `_.bindKey` + * 4 - `_.curry` or `_.curryRight` of a bound function + * 8 - `_.curry` + * 16 - `_.curryRight` + * 32 - `_.partial` + * 64 - `_.partialRight` + * 128 - `_.rearg` + * 256 - `_.ary` + * 512 - `_.flip` + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to be partially applied. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createWrap(func, bitmask, thisArg, partials, holders, argPos, ary, arity) { + var isBindKey = bitmask & WRAP_BIND_KEY_FLAG; + if (!isBindKey && typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + var length = partials ? partials.length : 0; + if (!length) { + bitmask &= ~(WRAP_PARTIAL_FLAG | WRAP_PARTIAL_RIGHT_FLAG); + partials = holders = undefined; + } + ary = ary === undefined ? ary : nativeMax(toInteger(ary), 0); + arity = arity === undefined ? arity : toInteger(arity); + length -= holders ? holders.length : 0; -/***/ }), + if (bitmask & WRAP_PARTIAL_RIGHT_FLAG) { + var partialsRight = partials, + holdersRight = holders; -/***/ 405: -/***/ ((module) => { + partials = holders = undefined; + } + var data = isBindKey ? undefined : getData(func); -var JsonWebTokenError = function (message, error) { - Error.call(this, message); - if(Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - this.name = 'JsonWebTokenError'; - this.message = message; - if (error) this.inner = error; -}; + var newData = [ + func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, + argPos, ary, arity + ]; -JsonWebTokenError.prototype = Object.create(Error.prototype); -JsonWebTokenError.prototype.constructor = JsonWebTokenError; + if (data) { + mergeData(newData, data); + } + func = newData[0]; + bitmask = newData[1]; + thisArg = newData[2]; + partials = newData[3]; + holders = newData[4]; + arity = newData[9] = newData[9] === undefined + ? (isBindKey ? 0 : func.length) + : nativeMax(newData[9] - length, 0); -module.exports = JsonWebTokenError; + if (!arity && bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG)) { + bitmask &= ~(WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG); + } + if (!bitmask || bitmask == WRAP_BIND_FLAG) { + var result = createBind(func, bitmask, thisArg); + } else if (bitmask == WRAP_CURRY_FLAG || bitmask == WRAP_CURRY_RIGHT_FLAG) { + result = createCurry(func, bitmask, arity); + } else if ((bitmask == WRAP_PARTIAL_FLAG || bitmask == (WRAP_BIND_FLAG | WRAP_PARTIAL_FLAG)) && !holders.length) { + result = createPartial(func, bitmask, thisArg, partials); + } else { + result = createHybrid.apply(undefined, newData); + } + var setter = data ? baseSetData : setData; + return setWrapToString(setter(result, newData), func, bitmask); + } + /** + * Used by `_.defaults` to customize its `_.assignIn` use to assign properties + * of source objects to the destination object for all destination properties + * that resolve to `undefined`. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to assign. 
+ * @param {Object} object The parent object of `objValue`. + * @returns {*} Returns the value to assign. + */ + function customDefaultsAssignIn(objValue, srcValue, key, object) { + if (objValue === undefined || + (eq(objValue, objectProto[key]) && !hasOwnProperty.call(object, key))) { + return srcValue; + } + return objValue; + } -/***/ }), + /** + * Used by `_.defaultsDeep` to customize its `_.merge` use to merge source + * objects into destination objects that are passed thru. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to merge. + * @param {Object} object The parent object of `objValue`. + * @param {Object} source The parent object of `srcValue`. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + * @returns {*} Returns the value to assign. + */ + function customDefaultsMerge(objValue, srcValue, key, object, source, stack) { + if (isObject(objValue) && isObject(srcValue)) { + // Recursively merge objects and arrays (susceptible to call stack limits). + stack.set(srcValue, objValue); + baseMerge(objValue, srcValue, undefined, customDefaultsMerge, stack); + stack['delete'](srcValue); + } + return objValue; + } -/***/ 4383: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Used by `_.omit` to customize its `_.cloneDeep` use to only clone plain + * objects. + * + * @private + * @param {*} value The value to inspect. + * @param {string} key The key of the property to inspect. + * @returns {*} Returns the uncloned value or `undefined` to defer cloning to `_.cloneDeep`. + */ + function customOmitClone(value) { + return isPlainObject(value) ? undefined : value; + } -var JsonWebTokenError = __nccwpck_require__(405); + /** + * A specialized version of `baseIsEqualDeep` for arrays with support for + * partial deep comparisons. + * + * @private + * @param {Array} array The array to compare. + * @param {Array} other The other array to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `array` and `other` objects. + * @returns {boolean} Returns `true` if the arrays are equivalent, else `false`. + */ + function equalArrays(array, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + arrLength = array.length, + othLength = other.length; -var NotBeforeError = function (message, date) { - JsonWebTokenError.call(this, message); - this.name = 'NotBeforeError'; - this.date = date; -}; + if (arrLength != othLength && !(isPartial && othLength > arrLength)) { + return false; + } + // Check that cyclic values are equal. + var arrStacked = stack.get(array); + var othStacked = stack.get(other); + if (arrStacked && othStacked) { + return arrStacked == other && othStacked == array; + } + var index = -1, + result = true, + seen = (bitmask & COMPARE_UNORDERED_FLAG) ? new SetCache : undefined; -NotBeforeError.prototype = Object.create(JsonWebTokenError.prototype); + stack.set(array, other); + stack.set(other, array); -NotBeforeError.prototype.constructor = NotBeforeError; + // Ignore non-index properties. 
+ while (++index < arrLength) { + var arrValue = array[index], + othValue = other[index]; -module.exports = NotBeforeError; + if (customizer) { + var compared = isPartial + ? customizer(othValue, arrValue, index, other, array, stack) + : customizer(arrValue, othValue, index, array, other, stack); + } + if (compared !== undefined) { + if (compared) { + continue; + } + result = false; + break; + } + // Recursively compare arrays (susceptible to call stack limits). + if (seen) { + if (!arraySome(other, function(othValue, othIndex) { + if (!cacheHas(seen, othIndex) && + (arrValue === othValue || equalFunc(arrValue, othValue, bitmask, customizer, stack))) { + return seen.push(othIndex); + } + })) { + result = false; + break; + } + } else if (!( + arrValue === othValue || + equalFunc(arrValue, othValue, bitmask, customizer, stack) + )) { + result = false; + break; + } + } + stack['delete'](array); + stack['delete'](other); + return result; + } -/***/ }), + /** + * A specialized version of `baseIsEqualDeep` for comparing objects of + * the same `toStringTag`. + * + * **Note:** This function only supports comparing values with tags of + * `Boolean`, `Date`, `Error`, `Number`, `RegExp`, or `String`. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {string} tag The `toStringTag` of the objects to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalByTag(object, other, tag, bitmask, customizer, equalFunc, stack) { + switch (tag) { + case dataViewTag: + if ((object.byteLength != other.byteLength) || + (object.byteOffset != other.byteOffset)) { + return false; + } + object = object.buffer; + other = other.buffer; -/***/ 46637: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + case arrayBufferTag: + if ((object.byteLength != other.byteLength) || + !equalFunc(new Uint8Array(object), new Uint8Array(other))) { + return false; + } + return true; -var JsonWebTokenError = __nccwpck_require__(405); + case boolTag: + case dateTag: + case numberTag: + // Coerce booleans to `1` or `0` and dates to milliseconds. + // Invalid dates are coerced to `NaN`. + return eq(+object, +other); -var TokenExpiredError = function (message, expiredAt) { - JsonWebTokenError.call(this, message); - this.name = 'TokenExpiredError'; - this.expiredAt = expiredAt; -}; + case errorTag: + return object.name == other.name && object.message == other.message; -TokenExpiredError.prototype = Object.create(JsonWebTokenError.prototype); + case regexpTag: + case stringTag: + // Coerce regexes to strings and treat strings, primitives and objects, + // as equal. See http://www.ecma-international.org/ecma-262/7.0/#sec-regexp.prototype.tostring + // for more details. + return object == (other + ''); -TokenExpiredError.prototype.constructor = TokenExpiredError; + case mapTag: + var convert = mapToArray; -module.exports = TokenExpiredError; + case setTag: + var isPartial = bitmask & COMPARE_PARTIAL_FLAG; + convert || (convert = setToArray); -/***/ }), + if (object.size != other.size && !isPartial) { + return false; + } + // Assume cyclic values are equal. 
+ var stacked = stack.get(object); + if (stacked) { + return stacked == other; + } + bitmask |= COMPARE_UNORDERED_FLAG; -/***/ 7622: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Recursively compare objects (susceptible to call stack limits). + stack.set(object, other); + var result = equalArrays(convert(object), convert(other), bitmask, customizer, equalFunc, stack); + stack['delete'](object); + return result; -const semver = __nccwpck_require__(43998); + case symbolTag: + if (symbolValueOf) { + return symbolValueOf.call(object) == symbolValueOf.call(other); + } + } + return false; + } -module.exports = semver.satisfies(process.version, '>=15.7.0'); + /** + * A specialized version of `baseIsEqualDeep` for objects with support for + * partial deep comparisons. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalObjects(object, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + objProps = getAllKeys(object), + objLength = objProps.length, + othProps = getAllKeys(other), + othLength = othProps.length; + if (objLength != othLength && !isPartial) { + return false; + } + var index = objLength; + while (index--) { + var key = objProps[index]; + if (!(isPartial ? key in other : hasOwnProperty.call(other, key))) { + return false; + } + } + // Check that cyclic values are equal. + var objStacked = stack.get(object); + var othStacked = stack.get(other); + if (objStacked && othStacked) { + return objStacked == other && othStacked == object; + } + var result = true; + stack.set(object, other); + stack.set(other, object); -/***/ }), + var skipCtor = isPartial; + while (++index < objLength) { + key = objProps[index]; + var objValue = object[key], + othValue = other[key]; -/***/ 59085: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (customizer) { + var compared = isPartial + ? customizer(othValue, objValue, key, other, object, stack) + : customizer(objValue, othValue, key, object, other, stack); + } + // Recursively compare objects (susceptible to call stack limits). + if (!(compared === undefined + ? (objValue === othValue || equalFunc(objValue, othValue, bitmask, customizer, stack)) + : compared + )) { + result = false; + break; + } + skipCtor || (skipCtor = key == 'constructor'); + } + if (result && !skipCtor) { + var objCtor = object.constructor, + othCtor = other.constructor; -var semver = __nccwpck_require__(43998); + // Non `Object` object instances with different constructors are not equal. + if (objCtor != othCtor && + ('constructor' in object && 'constructor' in other) && + !(typeof objCtor == 'function' && objCtor instanceof objCtor && + typeof othCtor == 'function' && othCtor instanceof othCtor)) { + result = false; + } + } + stack['delete'](object); + stack['delete'](other); + return result; + } -module.exports = semver.satisfies(process.version, '^6.12.0 || >=8.0.0'); + /** + * A specialized version of `baseRest` which flattens the rest array. 
+ * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @returns {Function} Returns the new function. + */ + function flatRest(func) { + return setToString(overRest(func, undefined, flatten), func + ''); + } + /** + * Creates an array of own enumerable property names and symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ + function getAllKeys(object) { + return baseGetAllKeys(object, keys, getSymbols); + } -/***/ }), + /** + * Creates an array of own and inherited enumerable property names and + * symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ + function getAllKeysIn(object) { + return baseGetAllKeys(object, keysIn, getSymbolsIn); + } -/***/ 45170: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Gets metadata for `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {*} Returns the metadata for `func`. + */ + var getData = !metaMap ? noop : function(func) { + return metaMap.get(func); + }; -const semver = __nccwpck_require__(43998); + /** + * Gets the name of `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {string} Returns the function name. + */ + function getFuncName(func) { + var result = (func.name + ''), + array = realNames[result], + length = hasOwnProperty.call(realNames, result) ? array.length : 0; -module.exports = semver.satisfies(process.version, '>=16.9.0'); + while (length--) { + var data = array[length], + otherFunc = data.func; + if (otherFunc == null || otherFunc == func) { + return data.name; + } + } + return result; + } + /** + * Gets the argument placeholder value for `func`. + * + * @private + * @param {Function} func The function to inspect. + * @returns {*} Returns the placeholder value. + */ + function getHolder(func) { + var object = hasOwnProperty.call(lodash, 'placeholder') ? lodash : func; + return object.placeholder; + } -/***/ }), + /** + * Gets the appropriate "iteratee" function. If `_.iteratee` is customized, + * this function returns the custom method, otherwise it returns `baseIteratee`. + * If arguments are provided, the chosen function is invoked with them and + * its result is returned. + * + * @private + * @param {*} [value] The value to convert to an iteratee. + * @param {number} [arity] The arity of the created iteratee. + * @returns {Function} Returns the chosen function or its result. + */ + function getIteratee() { + var result = lodash.iteratee || iteratee; + result = result === iteratee ? baseIteratee : result; + return arguments.length ? result(arguments[0], arguments[1]) : result; + } -/***/ 20910: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Gets the data for `map`. + * + * @private + * @param {Object} map The map to query. + * @param {string} key The reference key. + * @returns {*} Returns the map data. + */ + function getMapData(map, key) { + var data = map.__data__; + return isKeyable(key) + ? data[typeof key == 'string' ? 'string' : 'hash'] + : data.map; + } -var ms = __nccwpck_require__(80900); + /** + * Gets the property names, values, and compare flags of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the match data of `object`. 
+ */ + function getMatchData(object) { + var result = keys(object), + length = result.length; -module.exports = function (time, iat) { - var timestamp = iat || Math.floor(Date.now() / 1000); + while (length--) { + var key = result[length], + value = object[key]; - if (typeof time === 'string') { - var milliseconds = ms(time); - if (typeof milliseconds === 'undefined') { - return; + result[length] = [key, value, isStrictComparable(value)]; + } + return result; } - return Math.floor(timestamp + milliseconds / 1000); - } else if (typeof time === 'number') { - return timestamp + time; - } else { - return; - } -}; - -/***/ }), + /** + * Gets the native function at `key` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the method to get. + * @returns {*} Returns the function if it's native, else `undefined`. + */ + function getNative(object, key) { + var value = getValue(object, key); + return baseIsNative(value) ? value : undefined; + } -/***/ 47596: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the raw `toStringTag`. + */ + function getRawTag(value) { + var isOwn = hasOwnProperty.call(value, symToStringTag), + tag = value[symToStringTag]; -const ASYMMETRIC_KEY_DETAILS_SUPPORTED = __nccwpck_require__(7622); -const RSA_PSS_KEY_DETAILS_SUPPORTED = __nccwpck_require__(45170); + try { + value[symToStringTag] = undefined; + var unmasked = true; + } catch (e) {} -const allowedAlgorithmsForKeys = { - 'ec': ['ES256', 'ES384', 'ES512'], - 'rsa': ['RS256', 'PS256', 'RS384', 'PS384', 'RS512', 'PS512'], - 'rsa-pss': ['PS256', 'PS384', 'PS512'] -}; + var result = nativeObjectToString.call(value); + if (unmasked) { + if (isOwn) { + value[symToStringTag] = tag; + } else { + delete value[symToStringTag]; + } + } + return result; + } -const allowedCurves = { - ES256: 'prime256v1', - ES384: 'secp384r1', - ES512: 'secp521r1', -}; + /** + * Creates an array of the own enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ + var getSymbols = !nativeGetSymbols ? stubArray : function(object) { + if (object == null) { + return []; + } + object = Object(object); + return arrayFilter(nativeGetSymbols(object), function(symbol) { + return propertyIsEnumerable.call(object, symbol); + }); + }; -module.exports = function(algorithm, key) { - if (!algorithm || !key) return; + /** + * Creates an array of the own and inherited enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ + var getSymbolsIn = !nativeGetSymbols ? stubArray : function(object) { + var result = []; + while (object) { + arrayPush(result, getSymbols(object)); + object = getPrototype(object); + } + return result; + }; - const keyType = key.asymmetricKeyType; - if (!keyType) return; + /** + * Gets the `toStringTag` of `value`. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ + var getTag = baseGetTag; - const allowedAlgorithms = allowedAlgorithmsForKeys[keyType]; + // Fallback for data views, maps, sets, and weak maps in IE 11 and promises in Node.js < 6. 
+ if ((DataView && getTag(new DataView(new ArrayBuffer(1))) != dataViewTag) || + (Map && getTag(new Map) != mapTag) || + (Promise && getTag(Promise.resolve()) != promiseTag) || + (Set && getTag(new Set) != setTag) || + (WeakMap && getTag(new WeakMap) != weakMapTag)) { + getTag = function(value) { + var result = baseGetTag(value), + Ctor = result == objectTag ? value.constructor : undefined, + ctorString = Ctor ? toSource(Ctor) : ''; - if (!allowedAlgorithms) { - throw new Error(`Unknown key type "${keyType}".`); - } + if (ctorString) { + switch (ctorString) { + case dataViewCtorString: return dataViewTag; + case mapCtorString: return mapTag; + case promiseCtorString: return promiseTag; + case setCtorString: return setTag; + case weakMapCtorString: return weakMapTag; + } + } + return result; + }; + } - if (!allowedAlgorithms.includes(algorithm)) { - throw new Error(`"alg" parameter for "${keyType}" key type must be one of: ${allowedAlgorithms.join(', ')}.`) - } + /** + * Gets the view, applying any `transforms` to the `start` and `end` positions. + * + * @private + * @param {number} start The start of the view. + * @param {number} end The end of the view. + * @param {Array} transforms The transformations to apply to the view. + * @returns {Object} Returns an object containing the `start` and `end` + * positions of the view. + */ + function getView(start, end, transforms) { + var index = -1, + length = transforms.length; - /* - * Ignore the next block from test coverage because it gets executed - * conditionally depending on the Node version. Not ignoring it would - * prevent us from reaching the target % of coverage for versions of - * Node under 15.7.0. - */ - /* istanbul ignore next */ - if (ASYMMETRIC_KEY_DETAILS_SUPPORTED) { - switch (keyType) { - case 'ec': - const keyCurve = key.asymmetricKeyDetails.namedCurve; - const allowedCurve = allowedCurves[algorithm]; + while (++index < length) { + var data = transforms[index], + size = data.size; - if (keyCurve !== allowedCurve) { - throw new Error(`"alg" parameter "${algorithm}" requires curve "${allowedCurve}".`); + switch (data.type) { + case 'drop': start += size; break; + case 'dropRight': end -= size; break; + case 'take': end = nativeMin(end, start + size); break; + case 'takeRight': start = nativeMax(start, end - size); break; + } } - break; + return { 'start': start, 'end': end }; + } - case 'rsa-pss': - if (RSA_PSS_KEY_DETAILS_SUPPORTED) { - const length = parseInt(algorithm.slice(-3), 10); - const { hashAlgorithm, mgf1HashAlgorithm, saltLength } = key.asymmetricKeyDetails; + /** + * Extracts wrapper details from the `source` body comment. + * + * @private + * @param {string} source The source to inspect. + * @returns {Array} Returns the wrapper details. + */ + function getWrapDetails(source) { + var match = source.match(reWrapDetails); + return match ? match[1].split(reSplitDetails) : []; + } - if (hashAlgorithm !== `sha${length}` || mgf1HashAlgorithm !== hashAlgorithm) { - throw new Error(`Invalid key for this operation, its RSA-PSS parameters do not meet the requirements of "alg" ${algorithm}.`); - } + /** + * Checks if `path` exists on `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @param {Function} hasFunc The function to check properties. + * @returns {boolean} Returns `true` if `path` exists, else `false`. 
+ */ + function hasPath(object, path, hasFunc) { + path = castPath(path, object); - if (saltLength !== undefined && saltLength > length >> 3) { - throw new Error(`Invalid key for this operation, its RSA-PSS parameter saltLength does not meet the requirements of "alg" ${algorithm}.`) + var index = -1, + length = path.length, + result = false; + + while (++index < length) { + var key = toKey(path[index]); + if (!(result = object != null && hasFunc(object, key))) { + break; } + object = object[key]; } - break; + if (result || ++index != length) { + return result; + } + length = object == null ? 0 : object.length; + return !!length && isLength(length) && isIndex(key, length) && + (isArray(object) || isArguments(object)); } - } -} + /** + * Initializes an array clone. + * + * @private + * @param {Array} array The array to clone. + * @returns {Array} Returns the initialized clone. + */ + function initCloneArray(array) { + var length = array.length, + result = new array.constructor(length); -/***/ }), + // Add properties assigned by `RegExp#exec`. + if (length && typeof array[0] == 'string' && hasOwnProperty.call(array, 'index')) { + result.index = array.index; + result.input = array.input; + } + return result; + } -/***/ 92321: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Initializes an object clone. + * + * @private + * @param {Object} object The object to clone. + * @returns {Object} Returns the initialized clone. + */ + function initCloneObject(object) { + return (typeof object.constructor == 'function' && !isPrototype(object)) + ? baseCreate(getPrototype(object)) + : {}; + } -var bufferEqual = __nccwpck_require__(9239); -var Buffer = (__nccwpck_require__(21867).Buffer); -var crypto = __nccwpck_require__(6113); -var formatEcdsa = __nccwpck_require__(11728); -var util = __nccwpck_require__(73837); + /** + * Initializes an object clone based on its `toStringTag`. + * + * **Note:** This function only supports cloning values with tags of + * `Boolean`, `Date`, `Error`, `Map`, `Number`, `RegExp`, `Set`, or `String`. + * + * @private + * @param {Object} object The object to clone. + * @param {string} tag The `toStringTag` of the object to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the initialized clone. + */ + function initCloneByTag(object, tag, isDeep) { + var Ctor = object.constructor; + switch (tag) { + case arrayBufferTag: + return cloneArrayBuffer(object); -var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' 
-var MSG_INVALID_SECRET = 'secret must be a string or buffer'; -var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; -var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + case boolTag: + case dateTag: + return new Ctor(+object); -var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; -if (supportsKeyObjects) { - MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; - MSG_INVALID_SECRET += 'or a KeyObject'; -} + case dataViewTag: + return cloneDataView(object, isDeep); -function checkIsPublicKey(key) { - if (Buffer.isBuffer(key)) { - return; - } + case float32Tag: case float64Tag: + case int8Tag: case int16Tag: case int32Tag: + case uint8Tag: case uint8ClampedTag: case uint16Tag: case uint32Tag: + return cloneTypedArray(object, isDeep); - if (typeof key === 'string') { - return; - } + case mapTag: + return new Ctor; - if (!supportsKeyObjects) { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } + case numberTag: + case stringTag: + return new Ctor(object); - if (typeof key !== 'object') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } + case regexpTag: + return cloneRegExp(object); - if (typeof key.type !== 'string') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } + case setTag: + return new Ctor; - if (typeof key.asymmetricKeyType !== 'string') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } + case symbolTag: + return cloneSymbol(object); + } + } - if (typeof key.export !== 'function') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } -}; + /** + * Inserts wrapper `details` in a comment at the top of the `source` body. + * + * @private + * @param {string} source The source to modify. + * @returns {Array} details The details to insert. + * @returns {string} Returns the modified source. + */ + function insertWrapDetails(source, details) { + var length = details.length; + if (!length) { + return source; + } + var lastIndex = length - 1; + details[lastIndex] = (length > 1 ? '& ' : '') + details[lastIndex]; + details = details.join(length > 2 ? ', ' : ' '); + return source.replace(reWrapComment, '{\n/* [wrapped with ' + details + '] */\n'); + } -function checkIsPrivateKey(key) { - if (Buffer.isBuffer(key)) { - return; - } + /** + * Checks if `value` is a flattenable `arguments` object or array. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is flattenable, else `false`. + */ + function isFlattenable(value) { + return isArray(value) || isArguments(value) || + !!(spreadableSymbol && value && value[spreadableSymbol]); + } - if (typeof key === 'string') { - return; - } + /** + * Checks if `value` is a valid array-like index. + * + * @private + * @param {*} value The value to check. + * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. + * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. + */ + function isIndex(value, length) { + var type = typeof value; + length = length == null ? MAX_SAFE_INTEGER : length; - if (typeof key === 'object') { - return; - } + return !!length && + (type == 'number' || + (type != 'symbol' && reIsUint.test(value))) && + (value > -1 && value % 1 == 0 && value < length); + } - throw typeError(MSG_INVALID_SIGNER_KEY); -}; + /** + * Checks if the given arguments are from an iteratee call. + * + * @private + * @param {*} value The potential iteratee value argument. + * @param {*} index The potential iteratee index or key argument. + * @param {*} object The potential iteratee object argument. 
+ * @returns {boolean} Returns `true` if the arguments are from an iteratee call, + * else `false`. + */ + function isIterateeCall(value, index, object) { + if (!isObject(object)) { + return false; + } + var type = typeof index; + if (type == 'number' + ? (isArrayLike(object) && isIndex(index, object.length)) + : (type == 'string' && index in object) + ) { + return eq(object[index], value); + } + return false; + } -function checkIsSecretKey(key) { - if (Buffer.isBuffer(key)) { - return; - } + /** + * Checks if `value` is a property name and not a property path. + * + * @private + * @param {*} value The value to check. + * @param {Object} [object] The object to query keys on. + * @returns {boolean} Returns `true` if `value` is a property name, else `false`. + */ + function isKey(value, object) { + if (isArray(value)) { + return false; + } + var type = typeof value; + if (type == 'number' || type == 'symbol' || type == 'boolean' || + value == null || isSymbol(value)) { + return true; + } + return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || + (object != null && value in Object(object)); + } - if (typeof key === 'string') { - return key; - } + /** + * Checks if `value` is suitable for use as unique object key. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is suitable, else `false`. + */ + function isKeyable(value) { + var type = typeof value; + return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') + ? (value !== '__proto__') + : (value === null); + } - if (!supportsKeyObjects) { - throw typeError(MSG_INVALID_SECRET); - } + /** + * Checks if `func` has a lazy counterpart. + * + * @private + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` has a lazy counterpart, + * else `false`. + */ + function isLaziable(func) { + var funcName = getFuncName(func), + other = lodash[funcName]; - if (typeof key !== 'object') { - throw typeError(MSG_INVALID_SECRET); - } + if (typeof other != 'function' || !(funcName in LazyWrapper.prototype)) { + return false; + } + if (func === other) { + return true; + } + var data = getData(other); + return !!data && func === data[0]; + } - if (key.type !== 'secret') { - throw typeError(MSG_INVALID_SECRET); - } + /** + * Checks if `func` has its source masked. + * + * @private + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` is masked, else `false`. + */ + function isMasked(func) { + return !!maskSrcKey && (maskSrcKey in func); + } - if (typeof key.export !== 'function') { - throw typeError(MSG_INVALID_SECRET); - } -} + /** + * Checks if `func` is capable of being masked. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `func` is maskable, else `false`. + */ + var isMaskable = coreJsData ? isFunction : stubFalse; -function fromBase64(base64) { - return base64 - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); -} + /** + * Checks if `value` is likely a prototype object. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. 
+ */ + function isPrototype(value) { + var Ctor = value && value.constructor, + proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto; -function toBase64(base64url) { - base64url = base64url.toString(); + return value === proto; + } - var padding = 4 - base64url.length % 4; - if (padding !== 4) { - for (var i = 0; i < padding; ++i) { - base64url += '='; + /** + * Checks if `value` is suitable for strict equality comparisons, i.e. `===`. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` if suitable for strict + * equality comparisons, else `false`. + */ + function isStrictComparable(value) { + return value === value && !isObject(value); } - } - return base64url - .replace(/\-/g, '+') - .replace(/_/g, '/'); -} + /** + * A specialized version of `matchesProperty` for source values suitable + * for strict equality comparisons, i.e. `===`. + * + * @private + * @param {string} key The key of the property to get. + * @param {*} srcValue The value to match. + * @returns {Function} Returns the new spec function. + */ + function matchesStrictComparable(key, srcValue) { + return function(object) { + if (object == null) { + return false; + } + return object[key] === srcValue && + (srcValue !== undefined || (key in Object(object))); + }; + } -function typeError(template) { - var args = [].slice.call(arguments, 1); - var errMsg = util.format.bind(util, template).apply(null, args); - return new TypeError(errMsg); -} + /** + * A specialized version of `_.memoize` which clears the memoized function's + * cache when it exceeds `MAX_MEMOIZE_SIZE`. + * + * @private + * @param {Function} func The function to have its output memoized. + * @returns {Function} Returns the new memoized function. + */ + function memoizeCapped(func) { + var result = memoize(func, function(key) { + if (cache.size === MAX_MEMOIZE_SIZE) { + cache.clear(); + } + return key; + }); -function bufferOrString(obj) { - return Buffer.isBuffer(obj) || typeof obj === 'string'; -} + var cache = result.cache; + return result; + } -function normalizeInput(thing) { - if (!bufferOrString(thing)) - thing = JSON.stringify(thing); - return thing; -} + /** + * Merges the function metadata of `source` into `data`. + * + * Merging metadata reduces the number of wrappers used to invoke a function. + * This is possible because methods like `_.bind`, `_.curry`, and `_.partial` + * may be applied regardless of execution order. Methods like `_.ary` and + * `_.rearg` modify function arguments, making the order in which they are + * executed important, preventing the merging of metadata. However, we make + * an exception for a safe combined case where curried functions have `_.ary` + * and or `_.rearg` applied. + * + * @private + * @param {Array} data The destination metadata. + * @param {Array} source The source metadata. + * @returns {Array} Returns `data`. 
+ */ + function mergeData(data, source) { + var bitmask = data[1], + srcBitmask = source[1], + newBitmask = bitmask | srcBitmask, + isCommon = newBitmask < (WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG | WRAP_ARY_FLAG); -function createHmacSigner(bits) { - return function sign(thing, secret) { - checkIsSecretKey(secret); - thing = normalizeInput(thing); - var hmac = crypto.createHmac('sha' + bits, secret); - var sig = (hmac.update(thing), hmac.digest('base64')) - return fromBase64(sig); - } -} + var isCombo = + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_CURRY_FLAG)) || + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_REARG_FLAG) && (data[7].length <= source[8])) || + ((srcBitmask == (WRAP_ARY_FLAG | WRAP_REARG_FLAG)) && (source[7].length <= source[8]) && (bitmask == WRAP_CURRY_FLAG)); -function createHmacVerifier(bits) { - return function verify(thing, signature, secret) { - var computedSig = createHmacSigner(bits)(thing, secret); - return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); - } -} + // Exit early if metadata can't be merged. + if (!(isCommon || isCombo)) { + return data; + } + // Use source `thisArg` if available. + if (srcBitmask & WRAP_BIND_FLAG) { + data[2] = source[2]; + // Set when currying a bound function. + newBitmask |= bitmask & WRAP_BIND_FLAG ? 0 : WRAP_CURRY_BOUND_FLAG; + } + // Compose partial arguments. + var value = source[3]; + if (value) { + var partials = data[3]; + data[3] = partials ? composeArgs(partials, value, source[4]) : value; + data[4] = partials ? replaceHolders(data[3], PLACEHOLDER) : source[4]; + } + // Compose partial right arguments. + value = source[5]; + if (value) { + partials = data[5]; + data[5] = partials ? composeArgsRight(partials, value, source[6]) : value; + data[6] = partials ? replaceHolders(data[5], PLACEHOLDER) : source[6]; + } + // Use source `argPos` if available. + value = source[7]; + if (value) { + data[7] = value; + } + // Use source `ary` if it's smaller. + if (srcBitmask & WRAP_ARY_FLAG) { + data[8] = data[8] == null ? source[8] : nativeMin(data[8], source[8]); + } + // Use source `arity` if one is not provided. + if (data[9] == null) { + data[9] = source[9]; + } + // Use source `func` and merge bitmasks. + data[0] = source[0]; + data[1] = newBitmask; -function createKeySigner(bits) { - return function sign(thing, privateKey) { - checkIsPrivateKey(privateKey); - thing = normalizeInput(thing); - // Even though we are specifying "RSA" here, this works with ECDSA - // keys as well. - var signer = crypto.createSign('RSA-SHA' + bits); - var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); - return fromBase64(sig); - } -} + return data; + } -function createKeyVerifier(bits) { - return function verify(thing, signature, publicKey) { - checkIsPublicKey(publicKey); - thing = normalizeInput(thing); - signature = toBase64(signature); - var verifier = crypto.createVerify('RSA-SHA' + bits); - verifier.update(thing); - return verifier.verify(publicKey, signature, 'base64'); - } -} + /** + * This function is like + * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * except that it includes inherited enumerable properties. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. 
+ */ + function nativeKeysIn(object) { + var result = []; + if (object != null) { + for (var key in Object(object)) { + result.push(key); + } + } + return result; + } -function createPSSKeySigner(bits) { - return function sign(thing, privateKey) { - checkIsPrivateKey(privateKey); - thing = normalizeInput(thing); - var signer = crypto.createSign('RSA-SHA' + bits); - var sig = (signer.update(thing), signer.sign({ - key: privateKey, - padding: crypto.constants.RSA_PKCS1_PSS_PADDING, - saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST - }, 'base64')); - return fromBase64(sig); - } -} + /** + * Converts `value` to a string using `Object.prototype.toString`. + * + * @private + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + */ + function objectToString(value) { + return nativeObjectToString.call(value); + } -function createPSSKeyVerifier(bits) { - return function verify(thing, signature, publicKey) { - checkIsPublicKey(publicKey); - thing = normalizeInput(thing); - signature = toBase64(signature); - var verifier = crypto.createVerify('RSA-SHA' + bits); - verifier.update(thing); - return verifier.verify({ - key: publicKey, - padding: crypto.constants.RSA_PKCS1_PSS_PADDING, - saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST - }, signature, 'base64'); - } -} + /** + * A specialized version of `baseRest` which transforms the rest array. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @param {Function} transform The rest array transform. + * @returns {Function} Returns the new function. + */ + function overRest(func, start, transform) { + start = nativeMax(start === undefined ? (func.length - 1) : start, 0); + return function() { + var args = arguments, + index = -1, + length = nativeMax(args.length - start, 0), + array = Array(length); -function createECDSASigner(bits) { - var inner = createKeySigner(bits); - return function sign() { - var signature = inner.apply(null, arguments); - signature = formatEcdsa.derToJose(signature, 'ES' + bits); - return signature; - }; -} + while (++index < length) { + array[index] = args[start + index]; + } + index = -1; + var otherArgs = Array(start + 1); + while (++index < start) { + otherArgs[index] = args[index]; + } + otherArgs[start] = transform(array); + return apply(func, this, otherArgs); + }; + } -function createECDSAVerifer(bits) { - var inner = createKeyVerifier(bits); - return function verify(thing, signature, publicKey) { - signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); - var result = inner(thing, signature, publicKey); - return result; - }; -} + /** + * Gets the parent value at `path` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} path The path to get the parent value of. + * @returns {*} Returns the parent value. + */ + function parent(object, path) { + return path.length < 2 ? object : baseGet(object, baseSlice(path, 0, -1)); + } -function createNoneSigner() { - return function sign() { - return ''; - } -} + /** + * Reorder `array` according to the specified indexes where the element at + * the first index is assigned as the first element, the element at + * the second index is assigned as the second element, and so on. + * + * @private + * @param {Array} array The array to reorder. + * @param {Array} indexes The arranged array indexes. + * @returns {Array} Returns `array`. 
+ */ + function reorder(array, indexes) { + var arrLength = array.length, + length = nativeMin(indexes.length, arrLength), + oldArray = copyArray(array); -function createNoneVerifier() { - return function verify(thing, signature) { - return signature === ''; - } -} + while (length--) { + var index = indexes[length]; + array[length] = isIndex(index, arrLength) ? oldArray[index] : undefined; + } + return array; + } -module.exports = function jwa(algorithm) { - var signerFactories = { - hs: createHmacSigner, - rs: createKeySigner, - ps: createPSSKeySigner, - es: createECDSASigner, - none: createNoneSigner, - } - var verifierFactories = { - hs: createHmacVerifier, - rs: createKeyVerifier, - ps: createPSSKeyVerifier, - es: createECDSAVerifer, - none: createNoneVerifier, - } - var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/i); - if (!match) - throw typeError(MSG_INVALID_ALGORITHM, algorithm); - var algo = (match[1] || match[3]).toLowerCase(); - var bits = match[2]; + /** + * Gets the value at `key`, unless `key` is "__proto__" or "constructor". + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ + function safeGet(object, key) { + if (key === 'constructor' && typeof object[key] === 'function') { + return; + } - return { - sign: signerFactories[algo](bits), - verify: verifierFactories[algo](bits), - } -}; + if (key == '__proto__') { + return; + } + return object[key]; + } -/***/ }), + /** + * Sets metadata for `func`. + * + * **Note:** If this function becomes hot, i.e. is invoked a lot in a short + * period of time, it will trip its breaker and transition to an identity + * function to avoid garbage collection pauses in V8. See + * [V8 issue 2070](https://bugs.chromium.org/p/v8/issues/detail?id=2070) + * for more details. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ + var setData = shortOut(baseSetData); -/***/ 22597: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + /** + * A simple wrapper around the global [`setTimeout`](https://mdn.io/setTimeout). + * + * @private + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @returns {number|Object} Returns the timer id or timeout object. + */ + var setTimeout = ctxSetTimeout || function(func, wait) { + return root.setTimeout(func, wait); + }; -/*global exports*/ -var SignStream = __nccwpck_require__(35070); -var VerifyStream = __nccwpck_require__(63974); + /** + * Sets the `toString` method of `func` to return `string`. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. + */ + var setToString = shortOut(baseSetToString); -var ALGORITHMS = [ - 'HS256', 'HS384', 'HS512', - 'RS256', 'RS384', 'RS512', - 'PS256', 'PS384', 'PS512', - 'ES256', 'ES384', 'ES512' -]; + /** + * Sets the `toString` method of `wrapper` to mimic the source of `reference` + * with wrapper details in a comment at the top of the source body. + * + * @private + * @param {Function} wrapper The function to modify. + * @param {Function} reference The reference function. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @returns {Function} Returns `wrapper`. 
+ */ + function setWrapToString(wrapper, reference, bitmask) { + var source = (reference + ''); + return setToString(wrapper, insertWrapDetails(source, updateWrapDetails(getWrapDetails(source), bitmask))); + } -exports.ALGORITHMS = ALGORITHMS; -exports.sign = SignStream.sign; -exports.verify = VerifyStream.verify; -exports.decode = VerifyStream.decode; -exports.isValid = VerifyStream.isValid; -exports.createSign = function createSign(opts) { - return new SignStream(opts); -}; -exports.createVerify = function createVerify(opts) { - return new VerifyStream(opts); -}; + /** + * Creates a function that'll short out and invoke `identity` instead + * of `func` when it's called `HOT_COUNT` or more times in `HOT_SPAN` + * milliseconds. + * + * @private + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new shortable function. + */ + function shortOut(func) { + var count = 0, + lastCalled = 0; + return function() { + var stamp = nativeNow(), + remaining = HOT_SPAN - (stamp - lastCalled); -/***/ }), + lastCalled = stamp; + if (remaining > 0) { + if (++count >= HOT_COUNT) { + return arguments[0]; + } + } else { + count = 0; + } + return func.apply(undefined, arguments); + }; + } -/***/ 60704: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * A specialized version of `_.shuffle` which mutates and sets the size of `array`. + * + * @private + * @param {Array} array The array to shuffle. + * @param {number} [size=array.length] The size of `array`. + * @returns {Array} Returns `array`. + */ + function shuffleSelf(array, size) { + var index = -1, + length = array.length, + lastIndex = length - 1; -/*global module, process*/ -var Buffer = (__nccwpck_require__(21867).Buffer); -var Stream = __nccwpck_require__(12781); -var util = __nccwpck_require__(73837); + size = size === undefined ? length : size; + while (++index < size) { + var rand = baseRandom(index, lastIndex), + value = array[rand]; -function DataStream(data) { - this.buffer = null; - this.writable = true; - this.readable = true; + array[rand] = array[index]; + array[index] = value; + } + array.length = size; + return array; + } - // No input - if (!data) { - this.buffer = Buffer.alloc(0); - return this; - } + /** + * Converts `string` to a property path array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the property path array. + */ + var stringToPath = memoizeCapped(function(string) { + var result = []; + if (string.charCodeAt(0) === 46 /* . */) { + result.push(''); + } + string.replace(rePropName, function(match, number, quote, subString) { + result.push(quote ? subString.replace(reEscapeChar, '$1') : (number || match)); + }); + return result; + }); - // Stream - if (typeof data.pipe === 'function') { - this.buffer = Buffer.alloc(0); - data.pipe(this); - return this; - } + /** + * Converts `value` to a string key if it's not a string or symbol. + * + * @private + * @param {*} value The value to inspect. + * @returns {string|symbol} Returns the key. + */ + function toKey(value) { + if (typeof value == 'string' || isSymbol(value)) { + return value; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? 
'-0' : result; + } - // Buffer or String - // or Object (assumedly a passworded key) - if (data.length || typeof data === 'object') { - this.buffer = data; - this.writable = false; - process.nextTick(function () { - this.emit('end', data); - this.readable = false; - this.emit('close'); - }.bind(this)); - return this; - } + /** + * Converts `func` to its source code. + * + * @private + * @param {Function} func The function to convert. + * @returns {string} Returns the source code. + */ + function toSource(func) { + if (func != null) { + try { + return funcToString.call(func); + } catch (e) {} + try { + return (func + ''); + } catch (e) {} + } + return ''; + } - throw new TypeError('Unexpected data type ('+ typeof data + ')'); -} -util.inherits(DataStream, Stream); + /** + * Updates wrapper `details` based on `bitmask` flags. + * + * @private + * @returns {Array} details The details to modify. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @returns {Array} Returns `details`. + */ + function updateWrapDetails(details, bitmask) { + arrayEach(wrapFlags, function(pair) { + var value = '_.' + pair[0]; + if ((bitmask & pair[1]) && !arrayIncludes(details, value)) { + details.push(value); + } + }); + return details.sort(); + } -DataStream.prototype.write = function write(data) { - this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); - this.emit('data', data); -}; + /** + * Creates a clone of `wrapper`. + * + * @private + * @param {Object} wrapper The wrapper to clone. + * @returns {Object} Returns the cloned wrapper. + */ + function wrapperClone(wrapper) { + if (wrapper instanceof LazyWrapper) { + return wrapper.clone(); + } + var result = new LodashWrapper(wrapper.__wrapped__, wrapper.__chain__); + result.__actions__ = copyArray(wrapper.__actions__); + result.__index__ = wrapper.__index__; + result.__values__ = wrapper.__values__; + return result; + } -DataStream.prototype.end = function end(data) { - if (data) - this.write(data); - this.emit('end', data); - this.emit('close'); - this.writable = false; - this.readable = false; -}; + /*------------------------------------------------------------------------*/ -module.exports = DataStream; + /** + * Creates an array of elements split into groups the length of `size`. + * If `array` can't be split evenly, the final chunk will be the remaining + * elements. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to process. + * @param {number} [size=1] The length of each chunk + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the new array of chunks. + * @example + * + * _.chunk(['a', 'b', 'c', 'd'], 2); + * // => [['a', 'b'], ['c', 'd']] + * + * _.chunk(['a', 'b', 'c', 'd'], 3); + * // => [['a', 'b', 'c'], ['d']] + */ + function chunk(array, size, guard) { + if ((guard ? isIterateeCall(array, size, guard) : size === undefined)) { + size = 1; + } else { + size = nativeMax(toInteger(size), 0); + } + var length = array == null ? 0 : array.length; + if (!length || size < 1) { + return []; + } + var index = 0, + resIndex = 0, + result = Array(nativeCeil(length / size)); + while (index < length) { + result[resIndex++] = baseSlice(array, index, (index += size)); + } + return result; + } -/***/ }), + /** + * Creates an array with all falsey values removed. The values `false`, `null`, + * `0`, `""`, `undefined`, and `NaN` are falsey. 
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to compact. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.compact([0, 1, false, 2, '', 3]); + * // => [1, 2, 3] + */ + function compact(array) { + var index = -1, + length = array == null ? 0 : array.length, + resIndex = 0, + result = []; -/***/ 35070: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + while (++index < length) { + var value = array[index]; + if (value) { + result[resIndex++] = value; + } + } + return result; + } -/*global module*/ -var Buffer = (__nccwpck_require__(21867).Buffer); -var DataStream = __nccwpck_require__(60704); -var jwa = __nccwpck_require__(92321); -var Stream = __nccwpck_require__(12781); -var toString = __nccwpck_require__(56206); -var util = __nccwpck_require__(73837); + /** + * Creates a new array concatenating `array` with any additional arrays + * and/or values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to concatenate. + * @param {...*} [values] The values to concatenate. + * @returns {Array} Returns the new concatenated array. + * @example + * + * var array = [1]; + * var other = _.concat(array, 2, [3], [[4]]); + * + * console.log(other); + * // => [1, 2, 3, [4]] + * + * console.log(array); + * // => [1] + */ + function concat() { + var length = arguments.length; + if (!length) { + return []; + } + var args = Array(length - 1), + array = arguments[0], + index = length; -function base64url(string, encoding) { - return Buffer - .from(string, encoding) - .toString('base64') - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); -} + while (index--) { + args[index - 1] = arguments[index]; + } + return arrayPush(isArray(array) ? copyArray(array) : [array], baseFlatten(args, 1)); + } -function jwsSecuredInput(header, payload, encoding) { - encoding = encoding || 'utf8'; - var encodedHeader = base64url(toString(header), 'binary'); - var encodedPayload = base64url(toString(payload), encoding); - return util.format('%s.%s', encodedHeader, encodedPayload); -} + /** + * Creates an array of `array` values not included in the other given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. The order and references of result values are + * determined by the first array. + * + * **Note:** Unlike `_.pullAll`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @returns {Array} Returns the new array of filtered values. + * @see _.without, _.xor + * @example + * + * _.difference([2, 1], [2, 3]); + * // => [1] + */ + var difference = baseRest(function(array, values) { + return isArrayLikeObject(array) + ? 
baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true)) + : []; + }); -function jwsSign(opts) { - var header = opts.header; - var payload = opts.payload; - var secretOrKey = opts.secret || opts.privateKey; - var encoding = opts.encoding; - var algo = jwa(header.alg); - var securedInput = jwsSecuredInput(header, payload, encoding); - var signature = algo.sign(securedInput, secretOrKey); - return util.format('%s.%s', securedInput, signature); -} + /** + * This method is like `_.difference` except that it accepts `iteratee` which + * is invoked for each element of `array` and `values` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * **Note:** Unlike `_.pullAllBy`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.differenceBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [1.2] + * + * // The `_.property` iteratee shorthand. + * _.differenceBy([{ 'x': 2 }, { 'x': 1 }], [{ 'x': 1 }], 'x'); + * // => [{ 'x': 2 }] + */ + var differenceBy = baseRest(function(array, values) { + var iteratee = last(values); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), getIteratee(iteratee, 2)) + : []; + }); -function SignStream(opts) { - var secret = opts.secret||opts.privateKey||opts.key; - var secretStream = new DataStream(secret); - this.readable = true; - this.header = opts.header; - this.encoding = opts.encoding; - this.secret = this.privateKey = this.key = secretStream; - this.payload = new DataStream(opts.payload); - this.secret.once('close', function () { - if (!this.payload.writable && this.readable) - this.sign(); - }.bind(this)); + /** + * This method is like `_.difference` except that it accepts `comparator` + * which is invoked to compare elements of `array` to `values`. The order and + * references of result values are determined by the first array. The comparator + * is invoked with two arguments: (arrVal, othVal). + * + * **Note:** Unlike `_.pullAllWith`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * + * _.differenceWith(objects, [{ 'x': 1, 'y': 2 }], _.isEqual); + * // => [{ 'x': 2, 'y': 1 }] + */ + var differenceWith = baseRest(function(array, values) { + var comparator = last(values); + if (isArrayLikeObject(comparator)) { + comparator = undefined; + } + return isArrayLikeObject(array) + ? 
baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), undefined, comparator) + : []; + }); - this.payload.once('close', function () { - if (!this.secret.writable && this.readable) - this.sign(); - }.bind(this)); -} -util.inherits(SignStream, Stream); + /** + * Creates a slice of `array` with `n` elements dropped from the beginning. + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.drop([1, 2, 3]); + * // => [2, 3] + * + * _.drop([1, 2, 3], 2); + * // => [3] + * + * _.drop([1, 2, 3], 5); + * // => [] + * + * _.drop([1, 2, 3], 0); + * // => [1, 2, 3] + */ + function drop(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + return baseSlice(array, n < 0 ? 0 : n, length); + } + + /** + * Creates a slice of `array` with `n` elements dropped from the end. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.dropRight([1, 2, 3]); + * // => [1, 2] + * + * _.dropRight([1, 2, 3], 2); + * // => [1] + * + * _.dropRight([1, 2, 3], 5); + * // => [] + * + * _.dropRight([1, 2, 3], 0); + * // => [1, 2, 3] + */ + function dropRight(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + n = length - n; + return baseSlice(array, 0, n < 0 ? 0 : n); + } -SignStream.prototype.sign = function sign() { - try { - var signature = jwsSign({ - header: this.header, - payload: this.payload.buffer, - secret: this.secret.buffer, - encoding: this.encoding - }); - this.emit('done', signature); - this.emit('data', signature); - this.emit('end'); - this.readable = false; - return signature; - } catch (e) { - this.readable = false; - this.emit('error', e); - this.emit('close'); - } -}; + /** + * Creates a slice of `array` excluding elements dropped from the end. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.dropRightWhile(users, function(o) { return !o.active; }); + * // => objects for ['barney'] + * + * // The `_.matches` iteratee shorthand. + * _.dropRightWhile(users, { 'user': 'pebbles', 'active': false }); + * // => objects for ['barney', 'fred'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.dropRightWhile(users, ['active', false]); + * // => objects for ['barney'] + * + * // The `_.property` iteratee shorthand. 
+ * _.dropRightWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ + function dropRightWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), true, true) + : []; + } -SignStream.sign = jwsSign; + /** + * Creates a slice of `array` excluding elements dropped from the beginning. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.dropWhile(users, function(o) { return !o.active; }); + * // => objects for ['pebbles'] + * + * // The `_.matches` iteratee shorthand. + * _.dropWhile(users, { 'user': 'barney', 'active': false }); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.dropWhile(users, ['active', false]); + * // => objects for ['pebbles'] + * + * // The `_.property` iteratee shorthand. + * _.dropWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ + function dropWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), true) + : []; + } -module.exports = SignStream; + /** + * Fills elements of `array` with `value` from `start` up to, but not + * including, `end`. + * + * **Note:** This method mutates `array`. + * + * @static + * @memberOf _ + * @since 3.2.0 + * @category Array + * @param {Array} array The array to fill. + * @param {*} value The value to fill `array` with. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3]; + * + * _.fill(array, 'a'); + * console.log(array); + * // => ['a', 'a', 'a'] + * + * _.fill(Array(3), 2); + * // => [2, 2, 2] + * + * _.fill([4, 6, 8, 10], '*', 1, 3); + * // => [4, '*', '*', 10] + */ + function fill(array, value, start, end) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + if (start && typeof start != 'number' && isIterateeCall(array, value, start)) { + start = 0; + end = length; + } + return baseFill(array, value, start, end); + } + /** + * This method is like `_.find` except that it returns the index of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.findIndex(users, function(o) { return o.user == 'barney'; }); + * // => 0 + * + * // The `_.matches` iteratee shorthand. + * _.findIndex(users, { 'user': 'fred', 'active': false }); + * // => 1 + * + * // The `_.matchesProperty` iteratee shorthand. 
+ * _.findIndex(users, ['active', false]); + * // => 0 + * + * // The `_.property` iteratee shorthand. + * _.findIndex(users, 'active'); + * // => 2 + */ + function findIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseFindIndex(array, getIteratee(predicate, 3), index); + } -/***/ }), + /** + * This method is like `_.findIndex` except that it iterates over elements + * of `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.findLastIndex(users, function(o) { return o.user == 'pebbles'; }); + * // => 2 + * + * // The `_.matches` iteratee shorthand. + * _.findLastIndex(users, { 'user': 'barney', 'active': true }); + * // => 0 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastIndex(users, ['active', false]); + * // => 2 + * + * // The `_.property` iteratee shorthand. + * _.findLastIndex(users, 'active'); + * // => 0 + */ + function findLastIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length - 1; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = fromIndex < 0 + ? nativeMax(length + index, 0) + : nativeMin(index, length - 1); + } + return baseFindIndex(array, getIteratee(predicate, 3), index, true); + } -/***/ 56206: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Flattens `array` a single level deep. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flatten([1, [2, [3, [4]], 5]]); + * // => [1, 2, [3, [4]], 5] + */ + function flatten(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, 1) : []; + } -/*global module*/ -var Buffer = (__nccwpck_require__(14300).Buffer); + /** + * Recursively flattens `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flattenDeep([1, [2, [3, [4]], 5]]); + * // => [1, 2, 3, 4, 5] + */ + function flattenDeep(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, INFINITY) : []; + } -module.exports = function toString(obj) { - if (typeof obj === 'string') - return obj; - if (typeof obj === 'number' || Buffer.isBuffer(obj)) - return obj.toString(); - return JSON.stringify(obj); -}; + /** + * Recursively flatten `array` up to `depth` times. + * + * @static + * @memberOf _ + * @since 4.4.0 + * @category Array + * @param {Array} array The array to flatten. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. 
+ * @example + * + * var array = [1, [2, [3, [4]], 5]]; + * + * _.flattenDepth(array, 1); + * // => [1, 2, [3, [4]], 5] + * + * _.flattenDepth(array, 2); + * // => [1, 2, 3, [4], 5] + */ + function flattenDepth(array, depth) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(array, depth); + } + /** + * The inverse of `_.toPairs`; this method returns an object composed + * from key-value `pairs`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} pairs The key-value pairs. + * @returns {Object} Returns the new object. + * @example + * + * _.fromPairs([['a', 1], ['b', 2]]); + * // => { 'a': 1, 'b': 2 } + */ + function fromPairs(pairs) { + var index = -1, + length = pairs == null ? 0 : pairs.length, + result = {}; -/***/ }), + while (++index < length) { + var pair = pairs[index]; + result[pair[0]] = pair[1]; + } + return result; + } -/***/ 63974: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Gets the first element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias first + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the first element of `array`. + * @example + * + * _.head([1, 2, 3]); + * // => 1 + * + * _.head([]); + * // => undefined + */ + function head(array) { + return (array && array.length) ? array[0] : undefined; + } -/*global module*/ -var Buffer = (__nccwpck_require__(21867).Buffer); -var DataStream = __nccwpck_require__(60704); -var jwa = __nccwpck_require__(92321); -var Stream = __nccwpck_require__(12781); -var toString = __nccwpck_require__(56206); -var util = __nccwpck_require__(73837); -var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + /** + * Gets the index at which the first occurrence of `value` is found in `array` + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. If `fromIndex` is negative, it's used as the + * offset from the end of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.indexOf([1, 2, 1, 2], 2); + * // => 1 + * + * // Search from the `fromIndex`. + * _.indexOf([1, 2, 1, 2], 2, 2); + * // => 3 + */ + function indexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseIndexOf(array, value, index); + } -function isObject(thing) { - return Object.prototype.toString.call(thing) === '[object Object]'; -} + /** + * Gets all but the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.initial([1, 2, 3]); + * // => [1, 2] + */ + function initial(array) { + var length = array == null ? 0 : array.length; + return length ? 
baseSlice(array, 0, -1) : []; + } -function safeJsonParse(thing) { - if (isObject(thing)) - return thing; - try { return JSON.parse(thing); } - catch (e) { return undefined; } -} + /** + * Creates an array of unique values that are included in all given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. The order and references of result values are + * determined by the first array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersection([2, 1], [2, 3]); + * // => [2] + */ + var intersection = baseRest(function(arrays) { + var mapped = arrayMap(arrays, castArrayLikeObject); + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped) + : []; + }); -function headerFromJWS(jwsSig) { - var encodedHeader = jwsSig.split('.', 1)[0]; - return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); -} + /** + * This method is like `_.intersection` except that it accepts `iteratee` + * which is invoked for each element of each `arrays` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersectionBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [2.1] + * + * // The `_.property` iteratee shorthand. + * _.intersectionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }] + */ + var intersectionBy = baseRest(function(arrays) { + var iteratee = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); -function securedInputFromJWS(jwsSig) { - return jwsSig.split('.', 2).join('.'); -} + if (iteratee === last(mapped)) { + iteratee = undefined; + } else { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, getIteratee(iteratee, 2)) + : []; + }); -function signatureFromJWS(jwsSig) { - return jwsSig.split('.')[2]; -} + /** + * This method is like `_.intersection` except that it accepts `comparator` + * which is invoked to compare elements of `arrays`. The order and references + * of result values are determined by the first array. The comparator is + * invoked with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of intersecting values. 
+ * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.intersectionWith(objects, others, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }] + */ + var intersectionWith = baseRest(function(arrays) { + var comparator = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); -function payloadFromJWS(jwsSig, encoding) { - encoding = encoding || 'utf8'; - var payload = jwsSig.split('.')[1]; - return Buffer.from(payload, 'base64').toString(encoding); -} + comparator = typeof comparator == 'function' ? comparator : undefined; + if (comparator) { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, undefined, comparator) + : []; + }); -function isValidJws(string) { - return JWS_REGEX.test(string) && !!headerFromJWS(string); -} + /** + * Converts all elements in `array` into a string separated by `separator`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to convert. + * @param {string} [separator=','] The element separator. + * @returns {string} Returns the joined string. + * @example + * + * _.join(['a', 'b', 'c'], '~'); + * // => 'a~b~c' + */ + function join(array, separator) { + return array == null ? '' : nativeJoin.call(array, separator); + } -function jwsVerify(jwsSig, algorithm, secretOrKey) { - if (!algorithm) { - var err = new Error("Missing algorithm parameter for jws.verify"); - err.code = "MISSING_ALGORITHM"; - throw err; - } - jwsSig = toString(jwsSig); - var signature = signatureFromJWS(jwsSig); - var securedInput = securedInputFromJWS(jwsSig); - var algo = jwa(algorithm); - return algo.verify(securedInput, signature, secretOrKey); -} + /** + * Gets the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the last element of `array`. + * @example + * + * _.last([1, 2, 3]); + * // => 3 + */ + function last(array) { + var length = array == null ? 0 : array.length; + return length ? array[length - 1] : undefined; + } -function jwsDecode(jwsSig, opts) { - opts = opts || {}; - jwsSig = toString(jwsSig); + /** + * This method is like `_.indexOf` except that it iterates over elements of + * `array` from right to left. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.lastIndexOf([1, 2, 1, 2], 2); + * // => 3 + * + * // Search from the `fromIndex`. + * _.lastIndexOf([1, 2, 1, 2], 2, 2); + * // => 1 + */ + function lastIndexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = index < 0 ? nativeMax(length + index, 0) : nativeMin(index, length - 1); + } + return value === value + ? strictLastIndexOf(array, value, index) + : baseFindIndex(array, baseIsNaN, index, true); + } - if (!isValidJws(jwsSig)) - return null; + /** + * Gets the element at index `n` of `array`. If `n` is negative, the nth + * element from the end is returned. + * + * @static + * @memberOf _ + * @since 4.11.0 + * @category Array + * @param {Array} array The array to query. 
+ * @param {number} [n=0] The index of the element to return. + * @returns {*} Returns the nth element of `array`. + * @example + * + * var array = ['a', 'b', 'c', 'd']; + * + * _.nth(array, 1); + * // => 'b' + * + * _.nth(array, -2); + * // => 'c'; + */ + function nth(array, n) { + return (array && array.length) ? baseNth(array, toInteger(n)) : undefined; + } - var header = headerFromJWS(jwsSig); + /** + * Removes all given values from `array` using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * **Note:** Unlike `_.without`, this method mutates `array`. Use `_.remove` + * to remove elements from an array by predicate. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {...*} [values] The values to remove. + * @returns {Array} Returns `array`. + * @example + * + * var array = ['a', 'b', 'c', 'a', 'b', 'c']; + * + * _.pull(array, 'a', 'c'); + * console.log(array); + * // => ['b', 'b'] + */ + var pull = baseRest(pullAll); - if (!header) - return null; + /** + * This method is like `_.pull` except that it accepts an array of values to remove. + * + * **Note:** Unlike `_.difference`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @returns {Array} Returns `array`. + * @example + * + * var array = ['a', 'b', 'c', 'a', 'b', 'c']; + * + * _.pullAll(array, ['a', 'c']); + * console.log(array); + * // => ['b', 'b'] + */ + function pullAll(array, values) { + return (array && array.length && values && values.length) + ? basePullAll(array, values) + : array; + } - var payload = payloadFromJWS(jwsSig); - if (header.typ === 'JWT' || opts.json) - payload = JSON.parse(payload, opts.encoding); + /** + * This method is like `_.pullAll` except that it accepts `iteratee` which is + * invoked for each element of `array` and `values` to generate the criterion + * by which they're compared. The iteratee is invoked with one argument: (value). + * + * **Note:** Unlike `_.differenceBy`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns `array`. + * @example + * + * var array = [{ 'x': 1 }, { 'x': 2 }, { 'x': 3 }, { 'x': 1 }]; + * + * _.pullAllBy(array, [{ 'x': 1 }, { 'x': 3 }], 'x'); + * console.log(array); + * // => [{ 'x': 2 }] + */ + function pullAllBy(array, values, iteratee) { + return (array && array.length && values && values.length) + ? basePullAll(array, values, getIteratee(iteratee, 2)) + : array; + } - return { - header: header, - payload: payload, - signature: signatureFromJWS(jwsSig) - }; -} + /** + * This method is like `_.pullAll` except that it accepts `comparator` which + * is invoked to compare elements of `array` to `values`. The comparator is + * invoked with two arguments: (arrVal, othVal). + * + * **Note:** Unlike `_.differenceWith`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns `array`. 
+ * @example + * + * var array = [{ 'x': 1, 'y': 2 }, { 'x': 3, 'y': 4 }, { 'x': 5, 'y': 6 }]; + * + * _.pullAllWith(array, [{ 'x': 3, 'y': 4 }], _.isEqual); + * console.log(array); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 5, 'y': 6 }] + */ + function pullAllWith(array, values, comparator) { + return (array && array.length && values && values.length) + ? basePullAll(array, values, undefined, comparator) + : array; + } -function VerifyStream(opts) { - opts = opts || {}; - var secretOrKey = opts.secret||opts.publicKey||opts.key; - var secretStream = new DataStream(secretOrKey); - this.readable = true; - this.algorithm = opts.algorithm; - this.encoding = opts.encoding; - this.secret = this.publicKey = this.key = secretStream; - this.signature = new DataStream(opts.signature); - this.secret.once('close', function () { - if (!this.signature.writable && this.readable) - this.verify(); - }.bind(this)); + /** + * Removes elements from `array` corresponding to `indexes` and returns an + * array of removed elements. + * + * **Note:** Unlike `_.at`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {...(number|number[])} [indexes] The indexes of elements to remove. + * @returns {Array} Returns the new array of removed elements. + * @example + * + * var array = ['a', 'b', 'c', 'd']; + * var pulled = _.pullAt(array, [1, 3]); + * + * console.log(array); + * // => ['a', 'c'] + * + * console.log(pulled); + * // => ['b', 'd'] + */ + var pullAt = flatRest(function(array, indexes) { + var length = array == null ? 0 : array.length, + result = baseAt(array, indexes); - this.signature.once('close', function () { - if (!this.secret.writable && this.readable) - this.verify(); - }.bind(this)); -} -util.inherits(VerifyStream, Stream); -VerifyStream.prototype.verify = function verify() { - try { - var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); - var obj = jwsDecode(this.signature.buffer, this.encoding); - this.emit('done', valid, obj); - this.emit('data', valid); - this.emit('end'); - this.readable = false; - return valid; - } catch (e) { - this.readable = false; - this.emit('error', e); - this.emit('close'); - } -}; + basePullAt(array, arrayMap(indexes, function(index) { + return isIndex(index, length) ? +index : index; + }).sort(compareAscending)); -VerifyStream.decode = jwsDecode; -VerifyStream.isValid = isValidJws; -VerifyStream.verify = jwsVerify; + return result; + }); -module.exports = VerifyStream; + /** + * Removes all elements from `array` that `predicate` returns truthy for + * and returns an array of the removed elements. The predicate is invoked + * with three arguments: (value, index, array). + * + * **Note:** Unlike `_.filter`, this method mutates `array`. Use `_.pull` + * to pull elements from an array by value. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new array of removed elements. 
+ * @example + * + * var array = [1, 2, 3, 4]; + * var evens = _.remove(array, function(n) { + * return n % 2 == 0; + * }); + * + * console.log(array); + * // => [1, 3] + * + * console.log(evens); + * // => [2, 4] + */ + function remove(array, predicate) { + var result = []; + if (!(array && array.length)) { + return result; + } + var index = -1, + indexes = [], + length = array.length; + predicate = getIteratee(predicate, 3); + while (++index < length) { + var value = array[index]; + if (predicate(value, index, array)) { + result.push(value); + indexes.push(index); + } + } + basePullAt(array, indexes); + return result; + } -/***/ }), + /** + * Reverses `array` so that the first element becomes the last, the second + * element becomes the second to last, and so on. + * + * **Note:** This method mutates `array` and is based on + * [`Array#reverse`](https://mdn.io/Array/reverse). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3]; + * + * _.reverse(array); + * // => [3, 2, 1] + * + * console.log(array); + * // => [3, 2, 1] + */ + function reverse(array) { + return array == null ? array : nativeReverse.call(array); + } -/***/ 24644: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Creates a slice of `array` from `start` up to, but not including, `end`. + * + * **Note:** This method is used instead of + * [`Array#slice`](https://mdn.io/Array/slice) to ensure dense arrays are + * returned. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function slice(array, start, end) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + if (end && typeof end != 'number' && isIterateeCall(array, start, end)) { + start = 0; + end = length; + } + else { + start = start == null ? 0 : toInteger(start); + end = end === undefined ? length : toInteger(end); + } + return baseSlice(array, start, end); + } -const ANY = Symbol('SemVer ANY') -// hoisted class for cyclic dependency -class Comparator { - static get ANY () { - return ANY - } + /** + * Uses a binary search to determine the lowest index at which `value` + * should be inserted into `array` in order to maintain its sort order. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * _.sortedIndex([30, 50], 40); + * // => 1 + */ + function sortedIndex(array, value) { + return baseSortedIndex(array, value); + } - constructor (comp, options) { - options = parseOptions(options) + /** + * This method is like `_.sortedIndex` except that it accepts `iteratee` + * which is invoked for `value` and each element of `array` to compute their + * sort ranking. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. 
+ * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * var objects = [{ 'x': 4 }, { 'x': 5 }]; + * + * _.sortedIndexBy(objects, { 'x': 4 }, function(o) { return o.x; }); + * // => 0 + * + * // The `_.property` iteratee shorthand. + * _.sortedIndexBy(objects, { 'x': 4 }, 'x'); + * // => 0 + */ + function sortedIndexBy(array, value, iteratee) { + return baseSortedIndexBy(array, value, getIteratee(iteratee, 2)); + } - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value + /** + * This method is like `_.indexOf` except that it performs a binary + * search on a sorted `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.sortedIndexOf([4, 5, 5, 5, 6], 5); + * // => 1 + */ + function sortedIndexOf(array, value) { + var length = array == null ? 0 : array.length; + if (length) { + var index = baseSortedIndex(array, value); + if (index < length && eq(array[index], value)) { + return index; + } } + return -1; } - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) - - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version + /** + * This method is like `_.sortedIndex` except that it returns the highest + * index at which `value` should be inserted into `array` in order to + * maintain its sort order. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * _.sortedLastIndex([4, 5, 5, 5, 6], 5); + * // => 4 + */ + function sortedLastIndex(array, value) { + return baseSortedIndex(array, value, true); } - debug('comp', this) - } - - parse (comp) { - const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] - const m = comp.match(r) - - if (!m) { - throw new TypeError(`Invalid comparator: ${comp}`) + /** + * This method is like `_.sortedLastIndex` except that it accepts `iteratee` + * which is invoked for `value` and each element of `array` to compute their + * sort ranking. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * var objects = [{ 'x': 4 }, { 'x': 5 }]; + * + * _.sortedLastIndexBy(objects, { 'x': 4 }, function(o) { return o.x; }); + * // => 1 + * + * // The `_.property` iteratee shorthand. + * _.sortedLastIndexBy(objects, { 'x': 4 }, 'x'); + * // => 1 + */ + function sortedLastIndexBy(array, value, iteratee) { + return baseSortedIndexBy(array, value, getIteratee(iteratee, 2), true); } - this.operator = m[1] !== undefined ? m[1] : '' - if (this.operator === '=') { - this.operator = '' + /** + * This method is like `_.lastIndexOf` except that it performs a binary + * search on a sorted `array`. 
+ * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.sortedLastIndexOf([4, 5, 5, 5, 6], 5); + * // => 3 + */ + function sortedLastIndexOf(array, value) { + var length = array == null ? 0 : array.length; + if (length) { + var index = baseSortedIndex(array, value, true) - 1; + if (eq(array[index], value)) { + return index; + } + } + return -1; } - // if it literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) + /** + * This method is like `_.uniq` except that it's designed and optimized + * for sorted arrays. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.sortedUniq([1, 1, 2]); + * // => [1, 2] + */ + function sortedUniq(array) { + return (array && array.length) + ? baseSortedUniq(array) + : []; } - } - - toString () { - return this.value - } - test (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY || version === ANY) { - return true + /** + * This method is like `_.uniqBy` except that it's designed and optimized + * for sorted arrays. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.sortedUniqBy([1.1, 1.2, 2.3, 2.4], Math.floor); + * // => [1.1, 2.3] + */ + function sortedUniqBy(array, iteratee) { + return (array && array.length) + ? baseSortedUniq(array, getIteratee(iteratee, 2)) + : []; } - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } + /** + * Gets all but the first element of `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to query. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.tail([1, 2, 3]); + * // => [2, 3] + */ + function tail(array) { + var length = array == null ? 0 : array.length; + return length ? baseSlice(array, 1, length) : []; } - return cmp(version, this.operator, this.semver, this.options) - } - - intersects (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') + /** + * Creates a slice of `array` with `n` elements taken from the beginning. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to take. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.take([1, 2, 3]); + * // => [1] + * + * _.take([1, 2, 3], 2); + * // => [1, 2] + * + * _.take([1, 2, 3], 5); + * // => [1, 2, 3] + * + * _.take([1, 2, 3], 0); + * // => [] + */ + function take(array, n, guard) { + if (!(array && array.length)) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + return baseSlice(array, 0, n < 0 ? 
0 : n); } - if (this.operator === '') { - if (this.value === '') { - return true - } - return new Range(comp.value, options).test(this.value) - } else if (comp.operator === '') { - if (comp.value === '') { - return true + /** + * Creates a slice of `array` with `n` elements taken from the end. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to take. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.takeRight([1, 2, 3]); + * // => [3] + * + * _.takeRight([1, 2, 3], 2); + * // => [2, 3] + * + * _.takeRight([1, 2, 3], 5); + * // => [1, 2, 3] + * + * _.takeRight([1, 2, 3], 0); + * // => [] + */ + function takeRight(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; } - return new Range(this.value, options).test(comp.semver) + n = (guard || n === undefined) ? 1 : toInteger(n); + n = length - n; + return baseSlice(array, n < 0 ? 0 : n, length); } - options = parseOptions(options) - - // Special cases where nothing can possibly be lower - if (options.includePrerelease && - (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { - return false - } - if (!options.includePrerelease && - (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { - return false + /** + * Creates a slice of `array` with elements taken from the end. Elements are + * taken until `predicate` returns falsey. The predicate is invoked with + * three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.takeRightWhile(users, function(o) { return !o.active; }); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.matches` iteratee shorthand. + * _.takeRightWhile(users, { 'user': 'pebbles', 'active': false }); + * // => objects for ['pebbles'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.takeRightWhile(users, ['active', false]); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.property` iteratee shorthand. + * _.takeRightWhile(users, 'active'); + * // => [] + */ + function takeRightWhile(array, predicate) { + return (array && array.length) + ? 
baseWhile(array, getIteratee(predicate, 3), false, true) + : []; } - // Same direction increasing (> or >=) - if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { - return true - } - // Same direction decreasing (< or <=) - if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { - return true - } - // same SemVer and both sides are inclusive (<= or >=) - if ( - (this.semver.version === comp.semver.version) && - this.operator.includes('=') && comp.operator.includes('=')) { - return true - } - // opposite directions less than - if (cmp(this.semver, '<', comp.semver, options) && - this.operator.startsWith('>') && comp.operator.startsWith('<')) { - return true - } - // opposite directions greater than - if (cmp(this.semver, '>', comp.semver, options) && - this.operator.startsWith('<') && comp.operator.startsWith('>')) { - return true + /** + * Creates a slice of `array` with elements taken from the beginning. Elements + * are taken until `predicate` returns falsey. The predicate is invoked with + * three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.takeWhile(users, function(o) { return !o.active; }); + * // => objects for ['barney', 'fred'] + * + * // The `_.matches` iteratee shorthand. + * _.takeWhile(users, { 'user': 'barney', 'active': false }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.takeWhile(users, ['active', false]); + * // => objects for ['barney', 'fred'] + * + * // The `_.property` iteratee shorthand. + * _.takeWhile(users, 'active'); + * // => [] + */ + function takeWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3)) + : []; } - return false - } -} -module.exports = Comparator - -const parseOptions = __nccwpck_require__(85185) -const { re, t } = __nccwpck_require__(3682) -const cmp = __nccwpck_require__(23621) -const debug = __nccwpck_require__(22935) -const SemVer = __nccwpck_require__(93402) -const Range = __nccwpck_require__(34502) - - -/***/ }), + /** + * Creates an array of unique values, in order, from all given arrays using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of combined values. + * @example + * + * _.union([2], [1, 2]); + * // => [2, 1] + */ + var union = baseRest(function(arrays) { + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true)); + }); -/***/ 34502: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * This method is like `_.union` except that it accepts `iteratee` which is + * invoked for each element of each `arrays` to generate the criterion by + * which uniqueness is computed. Result values are chosen from the first + * array in which the value occurs. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. 
+ * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of combined values. + * @example + * + * _.unionBy([2.1], [1.2, 2.3], Math.floor); + * // => [2.1, 1.2] + * + * // The `_.property` iteratee shorthand. + * _.unionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }, { 'x': 2 }] + */ + var unionBy = baseRest(function(arrays) { + var iteratee = last(arrays); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true), getIteratee(iteratee, 2)); + }); -// hoisted class for cyclic dependency -class Range { - constructor (range, options) { - options = parseOptions(options) + /** + * This method is like `_.union` except that it accepts `comparator` which + * is invoked to compare elements of `arrays`. Result values are chosen from + * the first array in which the value occurs. The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of combined values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.unionWith(objects, others, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }, { 'x': 1, 'y': 1 }] + */ + var unionWith = baseRest(function(arrays) { + var comparator = last(arrays); + comparator = typeof comparator == 'function' ? comparator : undefined; + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true), undefined, comparator); + }); - if (range instanceof Range) { - if ( - range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease - ) { - return range - } else { - return new Range(range.raw, options) - } + /** + * Creates a duplicate-free version of an array, using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons, in which only the first occurrence of each element + * is kept. The order of result values is determined by the order they occur + * in the array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.uniq([2, 1, 2]); + * // => [2, 1] + */ + function uniq(array) { + return (array && array.length) ? baseUniq(array) : []; } - if (range instanceof Comparator) { - // just put it in the set and return - this.raw = range.value - this.set = [[range]] - this.format() - return this + /** + * This method is like `_.uniq` except that it accepts `iteratee` which is + * invoked for each element in `array` to generate the criterion by which + * uniqueness is computed. The order of result values is determined by the + * order they occur in the array. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.uniqBy([2.1, 1.2, 2.3], Math.floor); + * // => [2.1, 1.2] + * + * // The `_.property` iteratee shorthand. 
+ * _.uniqBy([{ 'x': 1 }, { 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }, { 'x': 2 }] + */ + function uniqBy(array, iteratee) { + return (array && array.length) ? baseUniq(array, getIteratee(iteratee, 2)) : []; } - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First, split based on boolean or || - this.raw = range - this.set = range - .split('||') - // map the range to a 2d array of comparators - .map(r => this.parseRange(r.trim())) - // throw out any comparator lists that are empty - // this generally means that it was not a valid range, which is allowed - // in loose mode, but will still throw if the WHOLE range is invalid. - .filter(c => c.length) - - if (!this.set.length) { - throw new TypeError(`Invalid SemVer Range: ${range}`) + /** + * This method is like `_.uniq` except that it accepts `comparator` which + * is invoked to compare elements of `array`. The order of result values is + * determined by the order they occur in the array.The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.uniqWith(objects, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }] + */ + function uniqWith(array, comparator) { + comparator = typeof comparator == 'function' ? comparator : undefined; + return (array && array.length) ? baseUniq(array, undefined, comparator) : []; } - // if we have any that are not the null set, throw out null sets. - if (this.set.length > 1) { - // keep the first one, in case they're all null sets - const first = this.set[0] - this.set = this.set.filter(c => !isNullSet(c[0])) - if (this.set.length === 0) { - this.set = [first] - } else if (this.set.length > 1) { - // if we have any that are *, then the range is just * - for (const c of this.set) { - if (c.length === 1 && isAny(c[0])) { - this.set = [c] - break - } - } + /** + * This method is like `_.zip` except that it accepts an array of grouped + * elements and creates an array regrouping the elements to their pre-zip + * configuration. + * + * @static + * @memberOf _ + * @since 1.2.0 + * @category Array + * @param {Array} array The array of grouped elements to process. + * @returns {Array} Returns the new array of regrouped elements. + * @example + * + * var zipped = _.zip(['a', 'b'], [1, 2], [true, false]); + * // => [['a', 1, true], ['b', 2, false]] + * + * _.unzip(zipped); + * // => [['a', 'b'], [1, 2], [true, false]] + */ + function unzip(array) { + if (!(array && array.length)) { + return []; } + var length = 0; + array = arrayFilter(array, function(group) { + if (isArrayLikeObject(group)) { + length = nativeMax(group.length, length); + return true; + } + }); + return baseTimes(length, function(index) { + return arrayMap(array, baseProperty(index)); + }); } - this.format() - } + /** + * This method is like `_.unzip` except that it accepts `iteratee` to specify + * how regrouped values should be combined. The iteratee is invoked with the + * elements of each group: (...group). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Array + * @param {Array} array The array of grouped elements to process. 
+ * @param {Function} [iteratee=_.identity] The function to combine + * regrouped values. + * @returns {Array} Returns the new array of regrouped elements. + * @example + * + * var zipped = _.zip([1, 2], [10, 20], [100, 200]); + * // => [[1, 10, 100], [2, 20, 200]] + * + * _.unzipWith(zipped, _.add); + * // => [3, 30, 300] + */ + function unzipWith(array, iteratee) { + if (!(array && array.length)) { + return []; + } + var result = unzip(array); + if (iteratee == null) { + return result; + } + return arrayMap(result, function(group) { + return apply(iteratee, undefined, group); + }); + } - format () { - this.range = this.set - .map((comps) => { - return comps.join(' ').trim() - }) - .join('||') - .trim() - return this.range - } + /** + * Creates an array excluding all given values using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * **Note:** Unlike `_.pull`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...*} [values] The values to exclude. + * @returns {Array} Returns the new array of filtered values. + * @see _.difference, _.xor + * @example + * + * _.without([2, 1, 2, 3], 1, 2); + * // => [3] + */ + var without = baseRest(function(array, values) { + return isArrayLikeObject(array) + ? baseDifference(array, values) + : []; + }); - toString () { - return this.range - } + /** + * Creates an array of unique values that is the + * [symmetric difference](https://en.wikipedia.org/wiki/Symmetric_difference) + * of the given arrays. The order of result values is determined by the order + * they occur in the arrays. + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of filtered values. + * @see _.difference, _.without + * @example + * + * _.xor([2, 1], [2, 3]); + * // => [1, 3] + */ + var xor = baseRest(function(arrays) { + return baseXor(arrayFilter(arrays, isArrayLikeObject)); + }); - parseRange (range) { - range = range.trim() + /** + * This method is like `_.xor` except that it accepts `iteratee` which is + * invoked for each element of each `arrays` to generate the criterion by + * which by which they're compared. The order of result values is determined + * by the order they occur in the arrays. The iteratee is invoked with one + * argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.xorBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [1.2, 3.4] + * + * // The `_.property` iteratee shorthand. + * _.xorBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 2 }] + */ + var xorBy = baseRest(function(arrays) { + var iteratee = last(arrays); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return baseXor(arrayFilter(arrays, isArrayLikeObject), getIteratee(iteratee, 2)); + }); - // memoize range parsing for performance. - // this is a very hot path, and fully deterministic. 
- const memoOpts = - (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | - (this.options.loose && FLAG_LOOSE) - const memoKey = memoOpts + ':' + range - const cached = cache.get(memoKey) - if (cached) { - return cached - } + /** + * This method is like `_.xor` except that it accepts `comparator` which is + * invoked to compare elements of `arrays`. The order of result values is + * determined by the order they occur in the arrays. The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.xorWith(objects, others, _.isEqual); + * // => [{ 'x': 2, 'y': 1 }, { 'x': 1, 'y': 1 }] + */ + var xorWith = baseRest(function(arrays) { + var comparator = last(arrays); + comparator = typeof comparator == 'function' ? comparator : undefined; + return baseXor(arrayFilter(arrays, isArrayLikeObject), undefined, comparator); + }); - const loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range) + /** + * Creates an array of grouped elements, the first of which contains the + * first elements of the given arrays, the second of which contains the + * second elements of the given arrays, and so on. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to process. + * @returns {Array} Returns the new array of grouped elements. + * @example + * + * _.zip(['a', 'b'], [1, 2], [true, false]); + * // => [['a', 1, true], ['b', 2, false]] + */ + var zip = baseRest(unzip); - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + /** + * This method is like `_.fromPairs` except that it accepts two arrays, + * one of property identifiers and one of corresponding values. + * + * @static + * @memberOf _ + * @since 0.4.0 + * @category Array + * @param {Array} [props=[]] The property identifiers. + * @param {Array} [values=[]] The property values. + * @returns {Object} Returns the new object. + * @example + * + * _.zipObject(['a', 'b'], [1, 2]); + * // => { 'a': 1, 'b': 2 } + */ + function zipObject(props, values) { + return baseZipObject(props || [], values || [], assignValue); + } - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[t.CARETTRIM], caretTrimReplace) + /** + * This method is like `_.zipObject` except that it supports property paths. + * + * @static + * @memberOf _ + * @since 4.1.0 + * @category Array + * @param {Array} [props=[]] The property identifiers. + * @param {Array} [values=[]] The property values. + * @returns {Object} Returns the new object. 
+ * @example + * + * _.zipObjectDeep(['a.b[0].c', 'a.b[1].d'], [1, 2]); + * // => { 'a': { 'b': [{ 'c': 1 }, { 'd': 2 }] } } + */ + function zipObjectDeep(props, values) { + return baseZipObject(props || [], values || [], baseSet); + } - // normalize spaces - range = range.split(/\s+/).join(' ') + /** + * This method is like `_.zip` except that it accepts `iteratee` to specify + * how grouped values should be combined. The iteratee is invoked with the + * elements of each group: (...group). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Array + * @param {...Array} [arrays] The arrays to process. + * @param {Function} [iteratee=_.identity] The function to combine + * grouped values. + * @returns {Array} Returns the new array of grouped elements. + * @example + * + * _.zipWith([1, 2], [10, 20], [100, 200], function(a, b, c) { + * return a + b + c; + * }); + * // => [111, 222] + */ + var zipWith = baseRest(function(arrays) { + var length = arrays.length, + iteratee = length > 1 ? arrays[length - 1] : undefined; - // At this point, the range is completely trimmed and - // ready to be split into comparators. + iteratee = typeof iteratee == 'function' ? (arrays.pop(), iteratee) : undefined; + return unzipWith(arrays, iteratee); + }); - let rangeList = range - .split(' ') - .map(comp => parseComparator(comp, this.options)) - .join(' ') - .split(/\s+/) - // >=0.0.0 is equivalent to * - .map(comp => replaceGTE0(comp, this.options)) + /*------------------------------------------------------------------------*/ - if (loose) { - // in loose mode, throw out any that are not valid comparators - rangeList = rangeList.filter(comp => { - debug('loose invalid filter', comp, this.options) - return !!comp.match(re[t.COMPARATORLOOSE]) - }) + /** + * Creates a `lodash` wrapper instance that wraps `value` with explicit method + * chain sequences enabled. The result of such sequences must be unwrapped + * with `_#value`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Seq + * @param {*} value The value to wrap. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'pebbles', 'age': 1 } + * ]; + * + * var youngest = _ + * .chain(users) + * .sortBy('age') + * .map(function(o) { + * return o.user + ' is ' + o.age; + * }) + * .head() + * .value(); + * // => 'pebbles is 1' + */ + function chain(value) { + var result = lodash(value); + result.__chain__ = true; + return result; } - debug('range list', rangeList) - // if any comparators are the null set, then replace with JUST null set - // if more than one comparator, remove any * comparators - // also, don't include the same comparator more than once - const rangeMap = new Map() - const comparators = rangeList.map(comp => new Comparator(comp, this.options)) - for (const comp of comparators) { - if (isNullSet(comp)) { - return [comp] - } - rangeMap.set(comp.value, comp) + /** + * This method invokes `interceptor` and returns `value`. The interceptor + * is invoked with one argument; (value). The purpose of this method is to + * "tap into" a method chain sequence in order to modify intermediate results. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns `value`. + * @example + * + * _([1, 2, 3]) + * .tap(function(array) { + * // Mutate input array. 
+ * array.pop(); + * }) + * .reverse() + * .value(); + * // => [2, 1] + */ + function tap(value, interceptor) { + interceptor(value); + return value; } - if (rangeMap.size > 1 && rangeMap.has('')) { - rangeMap.delete('') + + /** + * This method is like `_.tap` except that it returns the result of `interceptor`. + * The purpose of this method is to "pass thru" values replacing intermediate + * results in a method chain sequence. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns the result of `interceptor`. + * @example + * + * _(' abc ') + * .chain() + * .trim() + * .thru(function(value) { + * return [value]; + * }) + * .value(); + * // => ['abc'] + */ + function thru(value, interceptor) { + return interceptor(value); } - const result = [...rangeMap.values()] - cache.set(memoKey, result) - return result - } + /** + * This method is the wrapper version of `_.at`. + * + * @name at + * @memberOf _ + * @since 1.0.0 + * @category Seq + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }, 4] }; + * + * _(object).at(['a[0].b.c', 'a[1]']).value(); + * // => [3, 4] + */ + var wrapperAt = flatRest(function(paths) { + var length = paths.length, + start = length ? paths[0] : 0, + value = this.__wrapped__, + interceptor = function(object) { return baseAt(object, paths); }; + + if (length > 1 || this.__actions__.length || + !(value instanceof LazyWrapper) || !isIndex(start)) { + return this.thru(interceptor); + } + value = value.slice(start, +start + (length ? 1 : 0)); + value.__actions__.push({ + 'func': thru, + 'args': [interceptor], + 'thisArg': undefined + }); + return new LodashWrapper(value, this.__chain__).thru(function(array) { + if (length && !array.length) { + array.push(undefined); + } + return array; + }); + }); - intersects (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') + /** + * Creates a `lodash` wrapper instance with explicit method chain sequences enabled. + * + * @name chain + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 } + * ]; + * + * // A sequence without explicit chaining. + * _(users).head(); + * // => { 'user': 'barney', 'age': 36 } + * + * // A sequence with explicit chaining. + * _(users) + * .chain() + * .head() + * .pick('user') + * .value(); + * // => { 'user': 'barney' } + */ + function wrapperChain() { + return chain(this); } - return this.set.some((thisComparators) => { - return ( - isSatisfiable(thisComparators, options) && - range.set.some((rangeComparators) => { - return ( - isSatisfiable(rangeComparators, options) && - thisComparators.every((thisComparator) => { - return rangeComparators.every((rangeComparator) => { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) - } - - // if ANY of the sets match ALL of its comparators, then pass - test (version) { - if (!version) { - return false + /** + * Executes the chain sequence and returns the wrapped result. + * + * @name commit + * @memberOf _ + * @since 3.2.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. 
+ * @example + * + * var array = [1, 2]; + * var wrapped = _(array).push(3); + * + * console.log(array); + * // => [1, 2] + * + * wrapped = wrapped.commit(); + * console.log(array); + * // => [1, 2, 3] + * + * wrapped.last(); + * // => 3 + * + * console.log(array); + * // => [1, 2, 3] + */ + function wrapperCommit() { + return new LodashWrapper(this.value(), this.__chain__); } - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false + /** + * Gets the next value on a wrapped object following the + * [iterator protocol](https://mdn.io/iteration_protocols#iterator). + * + * @name next + * @memberOf _ + * @since 4.0.0 + * @category Seq + * @returns {Object} Returns the next iterator value. + * @example + * + * var wrapped = _([1, 2]); + * + * wrapped.next(); + * // => { 'done': false, 'value': 1 } + * + * wrapped.next(); + * // => { 'done': false, 'value': 2 } + * + * wrapped.next(); + * // => { 'done': true, 'value': undefined } + */ + function wrapperNext() { + if (this.__values__ === undefined) { + this.__values__ = toArray(this.value()); } - } + var done = this.__index__ >= this.__values__.length, + value = done ? undefined : this.__values__[this.__index__++]; - for (let i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } + return { 'done': done, 'value': value }; } - return false - } -} - -module.exports = Range - -const LRU = __nccwpck_require__(7129) -const cache = new LRU({ max: 1000 }) - -const parseOptions = __nccwpck_require__(85185) -const Comparator = __nccwpck_require__(24644) -const debug = __nccwpck_require__(22935) -const SemVer = __nccwpck_require__(93402) -const { - re, - t, - comparatorTrimReplace, - tildeTrimReplace, - caretTrimReplace, -} = __nccwpck_require__(3682) -const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = __nccwpck_require__(97344) - -const isNullSet = c => c.value === '<0.0.0-0' -const isAny = c => c.value === '' - -// take a set of comparators and determine whether there -// exists a version which can satisfy it -const isSatisfiable = (comparators, options) => { - let result = true - const remainingComparators = comparators.slice() - let testComparator = remainingComparators.pop() - - while (result && remainingComparators.length) { - result = remainingComparators.every((otherComparator) => { - return testComparator.intersects(otherComparator, options) - }) - - testComparator = remainingComparators.pop() - } - - return result -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. 
-const parseComparator = (comp, options) => { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -const isX = id => !id || id.toLowerCase() === 'x' || id === '*' - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 -// ~0.0.1 --> >=0.0.1 <0.1.0-0 -const replaceTildes = (comp, options) => - comp.trim().split(/\s+/).map((c) => { - return replaceTilde(c, options) - }).join(' ') - -const replaceTilde = (comp, options) => { - const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] - return comp.replace(r, (_, M, m, p, pr) => { - debug('tilde', comp, _, M, m, p, pr) - let ret - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = `>=${M}.0.0 <${+M + 1}.0.0-0` - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0-0 - ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` - } else if (pr) { - debug('replaceTilde pr', pr) - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${+m + 1}.0-0` - } else { - // ~1.2.3 == >=1.2.3 <1.3.0-0 - ret = `>=${M}.${m}.${p - } <${M}.${+m + 1}.0-0` + /** + * Enables the wrapper to be iterable. + * + * @name Symbol.iterator + * @memberOf _ + * @since 4.0.0 + * @category Seq + * @returns {Object} Returns the wrapper object. + * @example + * + * var wrapped = _([1, 2]); + * + * wrapped[Symbol.iterator]() === wrapped; + * // => true + * + * Array.from(wrapped); + * // => [1, 2] + */ + function wrapperToIterator() { + return this; } - debug('tilde return', ret) - return ret - }) -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 -// ^1.2.3 --> >=1.2.3 <2.0.0-0 -// ^1.2.0 --> >=1.2.0 <2.0.0-0 -// ^0.0.1 --> >=0.0.1 <0.0.2-0 -// ^0.1.0 --> >=0.1.0 <0.2.0-0 -const replaceCarets = (comp, options) => - comp.trim().split(/\s+/).map((c) => { - return replaceCaret(c, options) - }).join(' ') - -const replaceCaret = (comp, options) => { - debug('caret', comp, options) - const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] - const z = options.includePrerelease ? '-0' : '' - return comp.replace(r, (_, M, m, p, pr) => { - debug('caret', comp, _, M, m, p, pr) - let ret + /** + * Creates a clone of the chain sequence planting `value` as the wrapped value. + * + * @name plant + * @memberOf _ + * @since 3.2.0 + * @category Seq + * @param {*} value The value to plant. + * @returns {Object} Returns the new `lodash` wrapper instance. 
+ * @example + * + * function square(n) { + * return n * n; + * } + * + * var wrapped = _([1, 2]).map(square); + * var other = wrapped.plant([3, 4]); + * + * other.value(); + * // => [9, 16] + * + * wrapped.value(); + * // => [1, 4] + */ + function wrapperPlant(value) { + var result, + parent = this; - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` - } else if (isX(p)) { - if (M === '0') { - ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` - } else { - ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${m}.${+p + 1}-0` + while (parent instanceof baseLodash) { + var clone = wrapperClone(parent); + clone.__index__ = 0; + clone.__values__ = undefined; + if (result) { + previous.__wrapped__ = clone; } else { - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${+m + 1}.0-0` + result = clone; } - } else { - ret = `>=${M}.${m}.${p}-${pr - } <${+M + 1}.0.0-0` + var previous = clone; + parent = parent.__wrapped__; } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = `>=${M}.${m}.${p - }${z} <${M}.${m}.${+p + 1}-0` - } else { - ret = `>=${M}.${m}.${p - }${z} <${M}.${+m + 1}.0-0` + previous.__wrapped__ = value; + return result; + } + + /** + * This method is the wrapper version of `_.reverse`. + * + * **Note:** This method mutates the wrapped array. + * + * @name reverse + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var array = [1, 2, 3]; + * + * _(array).reverse().value() + * // => [3, 2, 1] + * + * console.log(array); + * // => [3, 2, 1] + */ + function wrapperReverse() { + var value = this.__wrapped__; + if (value instanceof LazyWrapper) { + var wrapped = value; + if (this.__actions__.length) { + wrapped = new LazyWrapper(this); } - } else { - ret = `>=${M}.${m}.${p - } <${+M + 1}.0.0-0` + wrapped = wrapped.reverse(); + wrapped.__actions__.push({ + 'func': thru, + 'args': [reverse], + 'thisArg': undefined + }); + return new LodashWrapper(wrapped, this.__chain__); } + return this.thru(reverse); } - debug('caret return', ret) - return ret - }) -} - -const replaceXRanges = (comp, options) => { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map((c) => { - return replaceXRange(c, options) - }).join(' ') -} - -const replaceXRange = (comp, options) => { - comp = comp.trim() - const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] - return comp.replace(r, (ret, gtlt, M, m, p, pr) => { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - const xM = isX(M) - const xm = xM || isX(m) - const xp = xm || isX(p) - const anyX = xp - - if (gtlt === '=' && anyX) { - gtlt = '' + /** + * Executes the chain sequence to resolve the unwrapped value. + * + * @name value + * @memberOf _ + * @since 0.1.0 + * @alias toJSON, valueOf + * @category Seq + * @returns {*} Returns the resolved unwrapped value. + * @example + * + * _([1, 2, 3]).value(); + * // => [1, 2, 3] + */ + function wrapperValue() { + return baseWrapperValue(this.__wrapped__, this.__actions__); } - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? 
'-0' : '' + /*------------------------------------------------------------------------*/ - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The corresponding value of + * each key is the number of times the key was returned by `iteratee`. The + * iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.countBy([6.1, 4.2, 6.3], Math.floor); + * // => { '4': 1, '6': 2 } + * + * // The `_.property` iteratee shorthand. + * _.countBy(['one', 'two', 'three'], 'length'); + * // => { '3': 2, '5': 1 } + */ + var countBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + ++result[key]; } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } + baseAssignValue(result, key, 1); } + }); - if (gtlt === '<') { - pr = '-0' + /** + * Checks if `predicate` returns truthy for **all** elements of `collection`. + * Iteration is stopped once `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * **Note:** This method returns `true` for + * [empty collections](https://en.wikipedia.org/wiki/Empty_set) because + * [everything is true](https://en.wikipedia.org/wiki/Vacuous_truth) of + * elements of empty collections. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. + * @example + * + * _.every([true, 1, null, 'yes'], Boolean); + * // => false + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.every(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.every(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.every(users, 'active'); + * // => false + */ + function every(collection, predicate, guard) { + var func = isArray(collection) ? 
arrayEvery : baseEvery; + if (guard && isIterateeCall(collection, predicate, guard)) { + predicate = undefined; } - - ret = `${gtlt + M}.${m}.${p}${pr}` - } else if (xm) { - ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` - } else if (xp) { - ret = `>=${M}.${m}.0${pr - } <${M}.${+m + 1}.0-0` + return func(collection, getIteratee(predicate, 3)); } - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -const replaceStars = (comp, options) => { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[t.STAR], '') -} - -const replaceGTE0 = (comp, options) => { - debug('replaceGTE0', comp, options) - return comp.trim() - .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') -} - -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 -const hyphenReplace = incPr => ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) => { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = `>=${fM}.0.0${incPr ? '-0' : ''}` - } else if (isX(fp)) { - from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` - } else if (fpr) { - from = `>=${from}` - } else { - from = `>=${from}${incPr ? '-0' : ''}` - } + /** + * Iterates over elements of `collection`, returning an array of all elements + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * **Note:** Unlike `_.remove`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.reject + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * _.filter(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.filter(users, { 'age': 36, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.filter(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. + * _.filter(users, 'active'); + * // => objects for ['barney'] + * + * // Combining several predicates using `_.overEvery` or `_.overSome`. + * _.filter(users, _.overSome([{ 'age': 36 }, ['age', 40]])); + * // => objects for ['fred', 'barney'] + */ + function filter(collection, predicate) { + var func = isArray(collection) ? arrayFilter : baseFilter; + return func(collection, getIteratee(predicate, 3)); + } - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = `<${+tM + 1}.0.0-0` - } else if (isX(tp)) { - to = `<${tM}.${+tm + 1}.0-0` - } else if (tpr) { - to = `<=${tM}.${tm}.${tp}-${tpr}` - } else if (incPr) { - to = `<${tM}.${tm}.${+tp + 1}-0` - } else { - to = `<=${to}` - } + /** + * Iterates over elements of `collection`, returning the first element + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). 
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false }, + * { 'user': 'pebbles', 'age': 1, 'active': true } + * ]; + * + * _.find(users, function(o) { return o.age < 40; }); + * // => object for 'barney' + * + * // The `_.matches` iteratee shorthand. + * _.find(users, { 'age': 1, 'active': true }); + * // => object for 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.find(users, ['active', false]); + * // => object for 'fred' + * + * // The `_.property` iteratee shorthand. + * _.find(users, 'active'); + * // => object for 'barney' + */ + var find = createFind(findIndex); - return (`${from} ${to}`).trim() -} + /** + * This method is like `_.find` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=collection.length-1] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * _.findLast([1, 2, 3, 4], function(n) { + * return n % 2 == 1; + * }); + * // => 3 + */ + var findLast = createFind(findLastIndex); -const testSet = (set, version, options) => { - for (let i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false + /** + * Creates a flattened array of values by running each element in `collection` + * thru `iteratee` and flattening the mapped results. The iteratee is invoked + * with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [n, n]; + * } + * + * _.flatMap([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ + function flatMap(collection, iteratee) { + return baseFlatten(map(collection, iteratee), 1); } - } - - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (let i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === Comparator.ANY) { - continue - } - if (set[i].semver.prerelease.length > 0) { - const allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } + /** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDeep([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ + function flatMapDeep(collection, iteratee) { + return baseFlatten(map(collection, iteratee), INFINITY); } - // Version has a -pre, but it's not one of the ones we like. - return false - } - - return true -} - - -/***/ }), - -/***/ 93402: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const debug = __nccwpck_require__(22935) -const { MAX_LENGTH, MAX_SAFE_INTEGER } = __nccwpck_require__(97344) -const { re, t } = __nccwpck_require__(3682) - -const parseOptions = __nccwpck_require__(85185) -const { compareIdentifiers } = __nccwpck_require__(22179) -class SemVer { - constructor (version, options) { - options = parseOptions(options) - - if (version instanceof SemVer) { - if (version.loose === !!options.loose && - version.includePrerelease === !!options.includePrerelease) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + /** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results up to `depth` times. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDepth([1, 2], duplicate, 2); + * // => [[1, 1], [2, 2]] + */ + function flatMapDepth(collection, iteratee, depth) { + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(map(collection, iteratee), depth); } - if (version.length > MAX_LENGTH) { - throw new TypeError( - `version is longer than ${MAX_LENGTH} characters` - ) + /** + * Iterates over elements of `collection` and invokes `iteratee` for each element. + * The iteratee is invoked with three arguments: (value, index|key, collection). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * **Note:** As with other "Collections" methods, objects with a "length" + * property are iterated like arrays. To avoid this behavior use `_.forIn` + * or `_.forOwn` for object iteration. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias each + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEachRight + * @example + * + * _.forEach([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `1` then `2`. + * + * _.forEach({ 'a': 1, 'b': 2 }, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ + function forEach(collection, iteratee) { + var func = isArray(collection) ? 
arrayEach : baseEach; + return func(collection, getIteratee(iteratee, 3)); } - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose - // this isn't actually relevant for versions, but keep it so that we - // don't run into trouble passing this.options around. - this.includePrerelease = !!options.includePrerelease - - const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) - - if (!m) { - throw new TypeError(`Invalid Version: ${version}`) + /** + * This method is like `_.forEach` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @alias eachRight + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEach + * @example + * + * _.forEachRight([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `2` then `1`. + */ + function forEachRight(collection, iteratee) { + var func = isArray(collection) ? arrayEachRight : baseEachRight; + return func(collection, getIteratee(iteratee, 3)); } - this.raw = version + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The order of grouped values + * is determined by the order they occur in `collection`. The corresponding + * value of each key is an array of elements responsible for generating the + * key. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.groupBy([6.1, 4.2, 6.3], Math.floor); + * // => { '4': [4.2], '6': [6.1, 6.3] } + * + * // The `_.property` iteratee shorthand. + * _.groupBy(['one', 'two', 'three'], 'length'); + * // => { '3': ['one', 'two'], '5': ['three'] } + */ + var groupBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + result[key].push(value); + } else { + baseAssignValue(result, key, [value]); + } + }); - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] + /** + * Checks if `value` is in `collection`. If `collection` is a string, it's + * checked for a substring of `value`, otherwise + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * is used for equality comparisons. If `fromIndex` is negative, it's used as + * the offset from the end of `collection`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {boolean} Returns `true` if `value` is found, else `false`. + * @example + * + * _.includes([1, 2, 3], 1); + * // => true + * + * _.includes([1, 2, 3], 1, 2); + * // => false + * + * _.includes({ 'a': 1, 'b': 2 }, 1); + * // => true + * + * _.includes('abcd', 'bc'); + * // => true + */ + function includes(collection, value, fromIndex, guard) { + collection = isArrayLike(collection) ? 
collection : values(collection); + fromIndex = (fromIndex && !guard) ? toInteger(fromIndex) : 0; - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') + var length = collection.length; + if (fromIndex < 0) { + fromIndex = nativeMax(length + fromIndex, 0); + } + return isString(collection) + ? (fromIndex <= length && collection.indexOf(value, fromIndex) > -1) + : (!!length && baseIndexOf(collection, value, fromIndex) > -1); } - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } + /** + * Invokes the method at `path` of each element in `collection`, returning + * an array of the results of each invoked method. Any additional arguments + * are provided to each invoked method. If `path` is a function, it's invoked + * for, and `this` bound to, each element in `collection`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Array|Function|string} path The path of the method to invoke or + * the function invoked per iteration. + * @param {...*} [args] The arguments to invoke each method with. + * @returns {Array} Returns the array of results. + * @example + * + * _.invokeMap([[5, 1, 7], [3, 2, 1]], 'sort'); + * // => [[1, 5, 7], [1, 2, 3]] + * + * _.invokeMap([123, 456], String.prototype.split, ''); + * // => [['1', '2', '3'], ['4', '5', '6']] + */ + var invokeMap = baseRest(function(collection, path, args) { + var index = -1, + isFunc = typeof path == 'function', + result = isArrayLike(collection) ? Array(collection.length) : []; - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } + baseEach(collection, function(value) { + result[++index] = isFunc ? apply(path, value, args) : baseInvoke(value, path, args); + }); + return result; + }); - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map((id) => { - if (/^[0-9]+$/.test(id)) { - const num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The corresponding value of + * each key is the last element responsible for generating the key. The + * iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * var array = [ + * { 'dir': 'left', 'code': 97 }, + * { 'dir': 'right', 'code': 100 } + * ]; + * + * _.keyBy(array, function(o) { + * return String.fromCharCode(o.code); + * }); + * // => { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } } + * + * _.keyBy(array, 'dir'); + * // => { 'left': { 'dir': 'left', 'code': 97 }, 'right': { 'dir': 'right', 'code': 100 } } + */ + var keyBy = createAggregator(function(result, value, key) { + baseAssignValue(result, key, value); + }); + + /** + * Creates an array of values by running each element in `collection` thru + * `iteratee`. The iteratee is invoked with three arguments: + * (value, index|key, collection). 
+ * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.every`, `_.filter`, `_.map`, `_.mapValues`, `_.reject`, and `_.some`. + * + * The guarded methods are: + * `ary`, `chunk`, `curry`, `curryRight`, `drop`, `dropRight`, `every`, + * `fill`, `invert`, `parseInt`, `random`, `range`, `rangeRight`, `repeat`, + * `sampleSize`, `slice`, `some`, `sortBy`, `split`, `take`, `takeRight`, + * `template`, `trim`, `trimEnd`, `trimStart`, and `words` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + * @example + * + * function square(n) { + * return n * n; + * } + * + * _.map([4, 8], square); + * // => [16, 64] + * + * _.map({ 'a': 4, 'b': 8 }, square); + * // => [16, 64] (iteration order is not guaranteed) + * + * var users = [ + * { 'user': 'barney' }, + * { 'user': 'fred' } + * ]; + * + * // The `_.property` iteratee shorthand. + * _.map(users, 'user'); + * // => ['barney', 'fred'] + */ + function map(collection, iteratee) { + var func = isArray(collection) ? arrayMap : baseMap; + return func(collection, getIteratee(iteratee, 3)); } - this.build = m[5] ? m[5].split('.') : [] - this.format() - } - - format () { - this.version = `${this.major}.${this.minor}.${this.patch}` - if (this.prerelease.length) { - this.version += `-${this.prerelease.join('.')}` + /** + * This method is like `_.sortBy` except that it allows specifying the sort + * orders of the iteratees to sort by. If `orders` is unspecified, all values + * are sorted in ascending order. Otherwise, specify an order of "desc" for + * descending or "asc" for ascending sort order of corresponding values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Array[]|Function[]|Object[]|string[]} [iteratees=[_.identity]] + * The iteratees to sort by. + * @param {string[]} [orders] The sort orders of `iteratees`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {Array} Returns the new sorted array. + * @example + * + * var users = [ + * { 'user': 'fred', 'age': 48 }, + * { 'user': 'barney', 'age': 34 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'barney', 'age': 36 } + * ]; + * + * // Sort by `user` in ascending order and by `age` in descending order. + * _.orderBy(users, ['user', 'age'], ['asc', 'desc']); + * // => objects for [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 40]] + */ + function orderBy(collection, iteratees, orders, guard) { + if (collection == null) { + return []; + } + if (!isArray(iteratees)) { + iteratees = iteratees == null ? [] : [iteratees]; + } + orders = guard ? undefined : orders; + if (!isArray(orders)) { + orders = orders == null ? [] : [orders]; + } + return baseOrderBy(collection, iteratees, orders); } - return this.version - } - toString () { - return this.version - } + /** + * Creates an array of elements split into two groups, the first of which + * contains elements `predicate` returns truthy for, the second of which + * contains elements `predicate` returns falsey for. The predicate is + * invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the array of grouped elements. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': true }, + * { 'user': 'pebbles', 'age': 1, 'active': false } + * ]; + * + * _.partition(users, function(o) { return o.active; }); + * // => objects for [['fred'], ['barney', 'pebbles']] + * + * // The `_.matches` iteratee shorthand. + * _.partition(users, { 'age': 1, 'active': false }); + * // => objects for [['pebbles'], ['barney', 'fred']] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.partition(users, ['active', false]); + * // => objects for [['barney', 'pebbles'], ['fred']] + * + * // The `_.property` iteratee shorthand. + * _.partition(users, 'active'); + * // => objects for [['fred'], ['barney', 'pebbles']] + */ + var partition = createAggregator(function(result, value, key) { + result[key ? 0 : 1].push(value); + }, function() { return [[], []]; }); - compare (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - if (typeof other === 'string' && other === this.version) { - return 0 - } - other = new SemVer(other, this.options) - } + /** + * Reduces `collection` to a value which is the accumulated result of running + * each element in `collection` thru `iteratee`, where each successive + * invocation is supplied the return value of the previous. If `accumulator` + * is not given, the first element of `collection` is used as the initial + * value. The iteratee is invoked with four arguments: + * (accumulator, value, index|key, collection). + * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.reduce`, `_.reduceRight`, and `_.transform`. + * + * The guarded methods are: + * `assign`, `defaults`, `defaultsDeep`, `includes`, `merge`, `orderBy`, + * and `sortBy` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @returns {*} Returns the accumulated value. + * @see _.reduceRight + * @example + * + * _.reduce([1, 2], function(sum, n) { + * return sum + n; + * }, 0); + * // => 3 + * + * _.reduce({ 'a': 1, 'b': 2, 'c': 1 }, function(result, value, key) { + * (result[value] || (result[value] = [])).push(key); + * return result; + * }, {}); + * // => { '1': ['a', 'c'], '2': ['b'] } (iteration order is not guaranteed) + */ + function reduce(collection, iteratee, accumulator) { + var func = isArray(collection) ? arrayReduce : baseReduce, + initAccum = arguments.length < 3; - if (other.version === this.version) { - return 0 + return func(collection, getIteratee(iteratee, 4), accumulator, initAccum, baseEach); } - return this.compareMain(other) || this.comparePre(other) - } + /** + * This method is like `_.reduce` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @returns {*} Returns the accumulated value. 
+ * @see _.reduce + * @example + * + * var array = [[0, 1], [2, 3], [4, 5]]; + * + * _.reduceRight(array, function(flattened, other) { + * return flattened.concat(other); + * }, []); + * // => [4, 5, 2, 3, 0, 1] + */ + function reduceRight(collection, iteratee, accumulator) { + var func = isArray(collection) ? arrayReduceRight : baseReduce, + initAccum = arguments.length < 3; - compareMain (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) + return func(collection, getIteratee(iteratee, 4), accumulator, initAccum, baseEachRight); } - return ( - compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) - ) - } - - comparePre (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) + /** + * The opposite of `_.filter`; this method returns the elements of `collection` + * that `predicate` does **not** return truthy for. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.filter + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': true } + * ]; + * + * _.reject(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.reject(users, { 'age': 40, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.reject(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. + * _.reject(users, 'active'); + * // => objects for ['barney'] + */ + function reject(collection, predicate) { + var func = isArray(collection) ? arrayFilter : baseFilter; + return func(collection, negate(getIteratee(predicate, 3))); } - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 + /** + * Gets a random element from `collection`. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Collection + * @param {Array|Object} collection The collection to sample. + * @returns {*} Returns the random element. + * @example + * + * _.sample([1, 2, 3, 4]); + * // => 2 + */ + function sample(collection) { + var func = isArray(collection) ? arraySample : baseSample; + return func(collection); } - let i = 0 - do { - const a = this.prerelease[i] - const b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue + /** + * Gets `n` random elements at unique keys from `collection` up to the + * size of `collection`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to sample. + * @param {number} [n=1] The number of elements to sample. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the random elements. 
+ * @example + * + * _.sampleSize([1, 2, 3], 2); + * // => [3, 1] + * + * _.sampleSize([1, 2, 3], 4); + * // => [2, 3, 1] + */ + function sampleSize(collection, n, guard) { + if ((guard ? isIterateeCall(collection, n, guard) : n === undefined)) { + n = 1; } else { - return compareIdentifiers(a, b) + n = toInteger(n); } - } while (++i) - } + var func = isArray(collection) ? arraySampleSize : baseSampleSize; + return func(collection, n); + } - compareBuild (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) + /** + * Creates an array of shuffled values, using a version of the + * [Fisher-Yates shuffle](https://en.wikipedia.org/wiki/Fisher-Yates_shuffle). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to shuffle. + * @returns {Array} Returns the new shuffled array. + * @example + * + * _.shuffle([1, 2, 3, 4]); + * // => [4, 1, 3, 2] + */ + function shuffle(collection) { + var func = isArray(collection) ? arrayShuffle : baseShuffle; + return func(collection); } - let i = 0 - do { - const a = this.build[i] - const b = other.build[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) + /** + * Gets the size of `collection` by returning its length for array-like + * values or the number of own enumerable string keyed properties for objects. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @returns {number} Returns the collection size. + * @example + * + * _.size([1, 2, 3]); + * // => 3 + * + * _.size({ 'a': 1, 'b': 2 }); + * // => 2 + * + * _.size('pebbles'); + * // => 7 + */ + function size(collection) { + if (collection == null) { + return 0; } - } while (++i) - } - - // preminor will bump the version up to the next minor release, and immediately - // down to pre-release. premajor and prepatch work the same way. - inc (release, identifier, identifierBase) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier, identifierBase) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier, identifierBase) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier, identifierBase) - this.inc('pre', identifier, identifierBase) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier, identifierBase) - } - this.inc('pre', identifier, identifierBase) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if ( - this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0 - ) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. 
- // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. - case 'pre': { - const base = Number(identifierBase) ? 1 : 0 - - if (!identifier && identifierBase === false) { - throw new Error('invalid increment argument: identifier is empty') - } - - if (this.prerelease.length === 0) { - this.prerelease = [base] - } else { - let i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - if (identifier === this.prerelease.join('.') && identifierBase === false) { - throw new Error('invalid increment argument: identifier already exists') - } - this.prerelease.push(base) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - let prerelease = [identifier, base] - if (identifierBase === false) { - prerelease = [identifier] - } - if (compareIdentifiers(this.prerelease[0], identifier) === 0) { - if (isNaN(this.prerelease[1])) { - this.prerelease = prerelease - } - } else { - this.prerelease = prerelease - } - } - break + if (isArrayLike(collection)) { + return isString(collection) ? stringSize(collection) : collection.length; } - default: - throw new Error(`invalid increment argument: ${release}`) + var tag = getTag(collection); + if (tag == mapTag || tag == setTag) { + return collection.size; + } + return baseKeys(collection).length; } - this.format() - this.raw = this.version - return this - } -} - -module.exports = SemVer - - -/***/ }), - -/***/ 18751: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const parse = __nccwpck_require__(53371) -const clean = (version, options) => { - const s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} -module.exports = clean - - -/***/ }), -/***/ 23621: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const eq = __nccwpck_require__(59792) -const neq = __nccwpck_require__(30658) -const gt = __nccwpck_require__(47040) -const gte = __nccwpck_require__(97445) -const lt = __nccwpck_require__(16054) -const lte = __nccwpck_require__(79387) - -const cmp = (a, op, b, loose) => { - switch (op) { - case '===': - if (typeof a === 'object') { - a = a.version - } - if (typeof b === 'object') { - b = b.version + /** + * Checks if `predicate` returns truthy for **any** element of `collection`. + * Iteration is stopped once `predicate` returns truthy. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. 
+ * @example + * + * _.some([null, 0, 'yes', false], Boolean); + * // => true + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.some(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.some(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.some(users, 'active'); + * // => true + */ + function some(collection, predicate, guard) { + var func = isArray(collection) ? arraySome : baseSome; + if (guard && isIterateeCall(collection, predicate, guard)) { + predicate = undefined; } - return a === b + return func(collection, getIteratee(predicate, 3)); + } - case '!==': - if (typeof a === 'object') { - a = a.version + /** + * Creates an array of elements, sorted in ascending order by the results of + * running each element in a collection thru each iteratee. This method + * performs a stable sort, that is, it preserves the original sort order of + * equal elements. The iteratees are invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {...(Function|Function[])} [iteratees=[_.identity]] + * The iteratees to sort by. + * @returns {Array} Returns the new sorted array. + * @example + * + * var users = [ + * { 'user': 'fred', 'age': 48 }, + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 30 }, + * { 'user': 'barney', 'age': 34 } + * ]; + * + * _.sortBy(users, [function(o) { return o.user; }]); + * // => objects for [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 30]] + * + * _.sortBy(users, ['user', 'age']); + * // => objects for [['barney', 34], ['barney', 36], ['fred', 30], ['fred', 48]] + */ + var sortBy = baseRest(function(collection, iteratees) { + if (collection == null) { + return []; } - if (typeof b === 'object') { - b = b.version + var length = iteratees.length; + if (length > 1 && isIterateeCall(collection, iteratees[0], iteratees[1])) { + iteratees = []; + } else if (length > 2 && isIterateeCall(iteratees[0], iteratees[1], iteratees[2])) { + iteratees = [iteratees[0]]; } - return a !== b - - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) - - case '>=': - return gte(a, b, loose) - - case '<': - return lt(a, b, loose) - - case '<=': - return lte(a, b, loose) - - default: - throw new TypeError(`Invalid operator: ${op}`) - } -} -module.exports = cmp - - -/***/ }), - -/***/ 85766: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(93402) -const parse = __nccwpck_require__(53371) -const { re, t } = __nccwpck_require__(3682) - -const coerce = (version, options) => { - if (version instanceof SemVer) { - return version - } + return baseOrderBy(collection, baseFlatten(iteratees, 1), []); + }); - if (typeof version === 'number') { - version = String(version) - } + /*------------------------------------------------------------------------*/ - if (typeof version !== 'string') { - return null - } + /** + * Gets the timestamp of the number of milliseconds that have elapsed since + * the Unix epoch (1 January 1970 00:00:00 UTC). + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Date + * @returns {number} Returns the timestamp. 
+ * @example + * + * _.defer(function(stamp) { + * console.log(_.now() - stamp); + * }, _.now()); + * // => Logs the number of milliseconds it took for the deferred invocation. + */ + var now = ctxNow || function() { + return root.Date.now(); + }; - options = options || {} + /*------------------------------------------------------------------------*/ - let match = null - if (!options.rtl) { - match = version.match(re[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. - // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. - let next - while ((next = re[t.COERCERTL].exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next + /** + * The opposite of `_.before`; this method creates a function that invokes + * `func` once it's called `n` or more times. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {number} n The number of calls before `func` is invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var saves = ['profile', 'settings']; + * + * var done = _.after(saves.length, function() { + * console.log('done saving!'); + * }); + * + * _.forEach(saves, function(type) { + * asyncSave({ 'type': type, 'complete': done }); + * }); + * // => Logs 'done saving!' after the two async saves have completed. 
+ */ + function after(n, func) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); } - re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + n = toInteger(n); + return function() { + if (--n < 1) { + return func.apply(this, arguments); + } + }; } - // leave it in a clean state - re[t.COERCERTL].lastIndex = -1 - } - - if (match === null) { - return null - } - - return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options) -} -module.exports = coerce - - -/***/ }), - -/***/ 1036: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(93402) -const compareBuild = (a, b, loose) => { - const versionA = new SemVer(a, loose) - const versionB = new SemVer(b, loose) - return versionA.compare(versionB) || versionA.compareBuild(versionB) -} -module.exports = compareBuild - -/***/ }), - -/***/ 4626: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(24343) -const compareLoose = (a, b) => compare(a, b, true) -module.exports = compareLoose - - -/***/ }), - -/***/ 24343: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(93402) -const compare = (a, b, loose) => - new SemVer(a, loose).compare(new SemVer(b, loose)) - -module.exports = compare - - -/***/ }), - -/***/ 94836: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const parse = __nccwpck_require__(53371) - -const diff = (version1, version2) => { - const v1 = parse(version1, null, true) - const v2 = parse(version2, null, true) - const comparison = v1.compare(v2) - - if (comparison === 0) { - return null - } - - const v1Higher = comparison > 0 - const highVersion = v1Higher ? v1 : v2 - const lowVersion = v1Higher ? v2 : v1 - const highHasPre = !!highVersion.prerelease.length - - // add the `pre` prefix if we are going to a prerelease version - const prefix = highHasPre ? 
'pre' : '' - - if (v1.major !== v2.major) { - return prefix + 'major' - } - - if (v1.minor !== v2.minor) { - return prefix + 'minor' - } - - if (v1.patch !== v2.patch) { - return prefix + 'patch' - } - - // at this point we know stable versions match but overall versions are not equal, - // so either they are both prereleases, or the lower version is a prerelease - - if (highHasPre) { - // high and low are preleases - return 'prerelease' - } - - if (lowVersion.patch) { - // anything higher than a patch bump would result in the wrong version - return 'patch' - } - - if (lowVersion.minor) { - // anything higher than a minor bump would result in the wrong version - return 'minor' - } - - // bumping major/minor/patch all have same result - return 'major' -} - -module.exports = diff - - -/***/ }), - -/***/ 59792: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(24343) -const eq = (a, b, loose) => compare(a, b, loose) === 0 -module.exports = eq - - -/***/ }), - -/***/ 47040: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(24343) -const gt = (a, b, loose) => compare(a, b, loose) > 0 -module.exports = gt - - -/***/ }), - -/***/ 97445: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(24343) -const gte = (a, b, loose) => compare(a, b, loose) >= 0 -module.exports = gte - - -/***/ }), - -/***/ 88877: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(93402) - -const inc = (version, release, options, identifier, identifierBase) => { - if (typeof (options) === 'string') { - identifierBase = identifier - identifier = options - options = undefined - } - - try { - return new SemVer( - version instanceof SemVer ? 
version.version : version, - options - ).inc(release, identifier, identifierBase).version - } catch (er) { - return null - } -} -module.exports = inc - - -/***/ }), - -/***/ 16054: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(24343) -const lt = (a, b, loose) => compare(a, b, loose) < 0 -module.exports = lt - - -/***/ }), - -/***/ 79387: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(24343) -const lte = (a, b, loose) => compare(a, b, loose) <= 0 -module.exports = lte - - -/***/ }), - -/***/ 57269: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(93402) -const major = (a, loose) => new SemVer(a, loose).major -module.exports = major - - -/***/ }), - -/***/ 59725: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(93402) -const minor = (a, loose) => new SemVer(a, loose).minor -module.exports = minor - - -/***/ }), - -/***/ 30658: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(24343) -const neq = (a, b, loose) => compare(a, b, loose) !== 0 -module.exports = neq - - -/***/ }), - -/***/ 53371: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(93402) -const parse = (version, options, throwErrors = false) => { - if (version instanceof SemVer) { - return version - } - try { - return new SemVer(version, options) - } catch (er) { - if (!throwErrors) { - return null + /** + * Creates a function that invokes `func`, with up to `n` arguments, + * ignoring any additional arguments. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to cap arguments for. + * @param {number} [n=func.length] The arity cap. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new capped function. + * @example + * + * _.map(['6', '8', '10'], _.ary(parseInt, 1)); + * // => [6, 8, 10] + */ + function ary(func, n, guard) { + n = guard ? undefined : n; + n = (func && n == null) ? func.length : n; + return createWrap(func, WRAP_ARY_FLAG, undefined, undefined, undefined, undefined, n); } - throw er - } -} - -module.exports = parse - -/***/ }), - -/***/ 98691: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const SemVer = __nccwpck_require__(93402) -const patch = (a, loose) => new SemVer(a, loose).patch -module.exports = patch + /** + * Creates a function that invokes `func`, with the `this` binding and arguments + * of the created function, while it's called less than `n` times. Subsequent + * calls to the created function return the result of the last `func` invocation. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {number} n The number of calls at which `func` is no longer invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * jQuery(element).on('click', _.before(5, addContactToList)); + * // => Allows adding up to 4 contacts to the list. 
+ */ + function before(n, func) { + var result; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n > 0) { + result = func.apply(this, arguments); + } + if (n <= 1) { + func = undefined; + } + return result; + }; + } + /** + * Creates a function that invokes `func` with the `this` binding of `thisArg` + * and `partials` prepended to the arguments it receives. + * + * The `_.bind.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for partially applied arguments. + * + * **Note:** Unlike native `Function#bind`, this method doesn't set the "length" + * property of bound functions. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to bind. + * @param {*} thisArg The `this` binding of `func`. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * function greet(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * + * var object = { 'user': 'fred' }; + * + * var bound = _.bind(greet, object, 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * // Bound with placeholders. + * var bound = _.bind(greet, object, _, '!'); + * bound('hi'); + * // => 'hi fred!' + */ + var bind = baseRest(function(func, thisArg, partials) { + var bitmask = WRAP_BIND_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bind)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(func, bitmask, thisArg, partials, holders); + }); -/***/ }), + /** + * Creates a function that invokes the method at `object[key]` with `partials` + * prepended to the arguments it receives. + * + * This method differs from `_.bind` by allowing bound functions to reference + * methods that may be redefined or don't yet exist. See + * [Peter Michaux's article](http://peter.michaux.ca/articles/lazy-function-definition-pattern) + * for more details. + * + * The `_.bindKey.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Function + * @param {Object} object The object to invoke the method on. + * @param {string} key The key of the method. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * var object = { + * 'user': 'fred', + * 'greet': function(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * }; + * + * var bound = _.bindKey(object, 'greet', 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * object.greet = function(greeting, punctuation) { + * return greeting + 'ya ' + this.user + punctuation; + * }; + * + * bound('!'); + * // => 'hiya fred!' + * + * // Bound with placeholders. + * var bound = _.bindKey(object, 'greet', _, '!'); + * bound('hi'); + * // => 'hiya fred!' 
+ */ + var bindKey = baseRest(function(object, key, partials) { + var bitmask = WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bindKey)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(key, bitmask, object, partials, holders); + }); -/***/ 47892: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Creates a function that accepts arguments of `func` and either invokes + * `func` returning its result, if at least `arity` number of arguments have + * been provided, or returns a function that accepts the remaining `func` + * arguments, and so on. The arity of `func` may be specified if `func.length` + * is not sufficient. + * + * The `_.curry.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. + * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curry(abc); + * + * curried(1)(2)(3); + * // => [1, 2, 3] + * + * curried(1, 2)(3); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(1)(_, 3)(2); + * // => [1, 2, 3] + */ + function curry(func, arity, guard) { + arity = guard ? undefined : arity; + var result = createWrap(func, WRAP_CURRY_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curry.placeholder; + return result; + } -const parse = __nccwpck_require__(53371) -const prerelease = (version, options) => { - const parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null -} -module.exports = prerelease + /** + * This method is like `_.curry` except that arguments are applied to `func` + * in the manner of `_.partialRight` instead of `_.partial`. + * + * The `_.curryRight.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. + * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curryRight(abc); + * + * curried(3)(2)(1); + * // => [1, 2, 3] + * + * curried(2, 3)(1); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(3)(1, _)(2); + * // => [1, 2, 3] + */ + function curryRight(func, arity, guard) { + arity = guard ? 
undefined : arity; + var result = createWrap(func, WRAP_CURRY_RIGHT_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curryRight.placeholder; + return result; + } + /** + * Creates a debounced function that delays invoking `func` until after `wait` + * milliseconds have elapsed since the last time the debounced function was + * invoked. The debounced function comes with a `cancel` method to cancel + * delayed `func` invocations and a `flush` method to immediately invoke them. + * Provide `options` to indicate whether `func` should be invoked on the + * leading and/or trailing edge of the `wait` timeout. The `func` is invoked + * with the last arguments provided to the debounced function. Subsequent + * calls to the debounced function return the result of the last `func` + * invocation. + * + * **Note:** If `leading` and `trailing` options are `true`, `func` is + * invoked on the trailing edge of the timeout only if the debounced function + * is invoked more than once during the `wait` timeout. + * + * If `wait` is `0` and `leading` is `false`, `func` invocation is deferred + * until to the next tick, similar to `setTimeout` with a timeout of `0`. + * + * See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/) + * for details over the differences between `_.debounce` and `_.throttle`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to debounce. + * @param {number} [wait=0] The number of milliseconds to delay. + * @param {Object} [options={}] The options object. + * @param {boolean} [options.leading=false] + * Specify invoking on the leading edge of the timeout. + * @param {number} [options.maxWait] + * The maximum time `func` is allowed to be delayed before it's invoked. + * @param {boolean} [options.trailing=true] + * Specify invoking on the trailing edge of the timeout. + * @returns {Function} Returns the new debounced function. + * @example + * + * // Avoid costly calculations while the window size is in flux. + * jQuery(window).on('resize', _.debounce(calculateLayout, 150)); + * + * // Invoke `sendMail` when clicked, debouncing subsequent calls. + * jQuery(element).on('click', _.debounce(sendMail, 300, { + * 'leading': true, + * 'trailing': false + * })); + * + * // Ensure `batchLog` is invoked once after 1 second of debounced calls. + * var debounced = _.debounce(batchLog, 250, { 'maxWait': 1000 }); + * var source = new EventSource('/stream'); + * jQuery(source).on('message', debounced); + * + * // Cancel the trailing debounced invocation. + * jQuery(window).on('popstate', debounced.cancel); + */ + function debounce(func, wait, options) { + var lastArgs, + lastThis, + maxWait, + result, + timerId, + lastCallTime, + lastInvokeTime = 0, + leading = false, + maxing = false, + trailing = true; -/***/ }), + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + wait = toNumber(wait) || 0; + if (isObject(options)) { + leading = !!options.leading; + maxing = 'maxWait' in options; + maxWait = maxing ? nativeMax(toNumber(options.maxWait) || 0, wait) : maxWait; + trailing = 'trailing' in options ? 
!!options.trailing : trailing; + } -/***/ 98227: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + function invokeFunc(time) { + var args = lastArgs, + thisArg = lastThis; -const compare = __nccwpck_require__(24343) -const rcompare = (a, b, loose) => compare(b, a, loose) -module.exports = rcompare + lastArgs = lastThis = undefined; + lastInvokeTime = time; + result = func.apply(thisArg, args); + return result; + } + function leadingEdge(time) { + // Reset any `maxWait` timer. + lastInvokeTime = time; + // Start the timer for the trailing edge. + timerId = setTimeout(timerExpired, wait); + // Invoke the leading edge. + return leading ? invokeFunc(time) : result; + } -/***/ }), + function remainingWait(time) { + var timeSinceLastCall = time - lastCallTime, + timeSinceLastInvoke = time - lastInvokeTime, + timeWaiting = wait - timeSinceLastCall; -/***/ 24590: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return maxing + ? nativeMin(timeWaiting, maxWait - timeSinceLastInvoke) + : timeWaiting; + } -const compareBuild = __nccwpck_require__(1036) -const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) -module.exports = rsort + function shouldInvoke(time) { + var timeSinceLastCall = time - lastCallTime, + timeSinceLastInvoke = time - lastInvokeTime; + // Either this is the first call, activity has stopped and we're at the + // trailing edge, the system time has gone backwards and we're treating + // it as the trailing edge, or we've hit the `maxWait` limit. + return (lastCallTime === undefined || (timeSinceLastCall >= wait) || + (timeSinceLastCall < 0) || (maxing && timeSinceLastInvoke >= maxWait)); + } -/***/ }), + function timerExpired() { + var time = now(); + if (shouldInvoke(time)) { + return trailingEdge(time); + } + // Restart the timer. + timerId = setTimeout(timerExpired, remainingWait(time)); + } -/***/ 45056: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + function trailingEdge(time) { + timerId = undefined; -const Range = __nccwpck_require__(34502) -const satisfies = (version, range, options) => { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} -module.exports = satisfies + // Only invoke if we have `lastArgs` which means `func` has been + // debounced at least once. + if (trailing && lastArgs) { + return invokeFunc(time); + } + lastArgs = lastThis = undefined; + return result; + } + function cancel() { + if (timerId !== undefined) { + clearTimeout(timerId); + } + lastInvokeTime = 0; + lastArgs = lastCallTime = lastThis = timerId = undefined; + } -/***/ }), + function flush() { + return timerId === undefined ? result : trailingEdge(now()); + } -/***/ 52340: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + function debounced() { + var time = now(), + isInvoking = shouldInvoke(time); -const compareBuild = __nccwpck_require__(1036) -const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) -module.exports = sort + lastArgs = arguments; + lastThis = this; + lastCallTime = time; + if (isInvoking) { + if (timerId === undefined) { + return leadingEdge(lastCallTime); + } + if (maxing) { + // Handle invocations in a tight loop. 
+ clearTimeout(timerId); + timerId = setTimeout(timerExpired, wait); + return invokeFunc(lastCallTime); + } + } + if (timerId === undefined) { + timerId = setTimeout(timerExpired, wait); + } + return result; + } + debounced.cancel = cancel; + debounced.flush = flush; + return debounced; + } -/***/ }), + /** + * Defers invoking the `func` until the current call stack has cleared. Any + * additional arguments are provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to defer. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. + * @example + * + * _.defer(function(text) { + * console.log(text); + * }, 'deferred'); + * // => Logs 'deferred' after one millisecond. + */ + var defer = baseRest(function(func, args) { + return baseDelay(func, 1, args); + }); -/***/ 85715: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Invokes `func` after `wait` milliseconds. Any additional arguments are + * provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. + * @example + * + * _.delay(function(text) { + * console.log(text); + * }, 1000, 'later'); + * // => Logs 'later' after one second. + */ + var delay = baseRest(function(func, wait, args) { + return baseDelay(func, toNumber(wait) || 0, args); + }); -const parse = __nccwpck_require__(53371) -const valid = (version, options) => { - const v = parse(version, options) - return v ? v.version : null -} -module.exports = valid + /** + * Creates a function that invokes `func` with arguments reversed. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to flip arguments for. + * @returns {Function} Returns the new flipped function. + * @example + * + * var flipped = _.flip(function() { + * return _.toArray(arguments); + * }); + * + * flipped('a', 'b', 'c', 'd'); + * // => ['d', 'c', 'b', 'a'] + */ + function flip(func) { + return createWrap(func, WRAP_FLIP_FLAG); + } + /** + * Creates a function that memoizes the result of `func`. If `resolver` is + * provided, it determines the cache key for storing the result based on the + * arguments provided to the memoized function. By default, the first argument + * provided to the memoized function is used as the map cache key. The `func` + * is invoked with the `this` binding of the memoized function. + * + * **Note:** The cache is exposed as the `cache` property on the memoized + * function. Its creation may be customized by replacing the `_.memoize.Cache` + * constructor with one whose instances implement the + * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) + * method interface of `clear`, `delete`, `get`, `has`, and `set`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to have its output memoized. + * @param {Function} [resolver] The function to resolve the cache key. + * @returns {Function} Returns the new memoized function. 
+ * @example + * + * var object = { 'a': 1, 'b': 2 }; + * var other = { 'c': 3, 'd': 4 }; + * + * var values = _.memoize(_.values); + * values(object); + * // => [1, 2] + * + * values(other); + * // => [3, 4] + * + * object.a = 2; + * values(object); + * // => [1, 2] + * + * // Modify the result cache. + * values.cache.set(object, ['a', 'b']); + * values(object); + * // => ['a', 'b'] + * + * // Replace `_.memoize.Cache`. + * _.memoize.Cache = WeakMap; + */ + function memoize(func, resolver) { + if (typeof func != 'function' || (resolver != null && typeof resolver != 'function')) { + throw new TypeError(FUNC_ERROR_TEXT); + } + var memoized = function() { + var args = arguments, + key = resolver ? resolver.apply(this, args) : args[0], + cache = memoized.cache; -/***/ }), + if (cache.has(key)) { + return cache.get(key); + } + var result = func.apply(this, args); + memoized.cache = cache.set(key, result) || cache; + return result; + }; + memoized.cache = new (memoize.Cache || MapCache); + return memoized; + } -/***/ 43998: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Expose `MapCache`. + memoize.Cache = MapCache; -// just pre-load all the stuff that index.js lazily exports -const internalRe = __nccwpck_require__(3682) -const constants = __nccwpck_require__(97344) -const SemVer = __nccwpck_require__(93402) -const identifiers = __nccwpck_require__(22179) -const parse = __nccwpck_require__(53371) -const valid = __nccwpck_require__(85715) -const clean = __nccwpck_require__(18751) -const inc = __nccwpck_require__(88877) -const diff = __nccwpck_require__(94836) -const major = __nccwpck_require__(57269) -const minor = __nccwpck_require__(59725) -const patch = __nccwpck_require__(98691) -const prerelease = __nccwpck_require__(47892) -const compare = __nccwpck_require__(24343) -const rcompare = __nccwpck_require__(98227) -const compareLoose = __nccwpck_require__(4626) -const compareBuild = __nccwpck_require__(1036) -const sort = __nccwpck_require__(52340) -const rsort = __nccwpck_require__(24590) -const gt = __nccwpck_require__(47040) -const lt = __nccwpck_require__(16054) -const eq = __nccwpck_require__(59792) -const neq = __nccwpck_require__(30658) -const gte = __nccwpck_require__(97445) -const lte = __nccwpck_require__(79387) -const cmp = __nccwpck_require__(23621) -const coerce = __nccwpck_require__(85766) -const Comparator = __nccwpck_require__(24644) -const Range = __nccwpck_require__(34502) -const satisfies = __nccwpck_require__(45056) -const toComparators = __nccwpck_require__(54676) -const maxSatisfying = __nccwpck_require__(5471) -const minSatisfying = __nccwpck_require__(65356) -const minVersion = __nccwpck_require__(7825) -const validRange = __nccwpck_require__(66895) -const outside = __nccwpck_require__(8629) -const gtr = __nccwpck_require__(67970) -const ltr = __nccwpck_require__(53270) -const intersects = __nccwpck_require__(1373) -const simplifyRange = __nccwpck_require__(94336) -const subset = __nccwpck_require__(54979) -module.exports = { - parse, - valid, - clean, - inc, - diff, - major, - minor, - patch, - prerelease, - compare, - rcompare, - compareLoose, - compareBuild, - sort, - rsort, - gt, - lt, - eq, - neq, - gte, - lte, - cmp, - coerce, - Comparator, - Range, - satisfies, - toComparators, - maxSatisfying, - minSatisfying, - minVersion, - validRange, - outside, - gtr, - ltr, - intersects, - simplifyRange, - subset, - SemVer, - re: internalRe.re, - src: internalRe.src, - tokens: internalRe.t, - SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, - 
RELEASE_TYPES: constants.RELEASE_TYPES, - compareIdentifiers: identifiers.compareIdentifiers, - rcompareIdentifiers: identifiers.rcompareIdentifiers, -} + /** + * Creates a function that negates the result of the predicate `func`. The + * `func` predicate is invoked with the `this` binding and arguments of the + * created function. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} predicate The predicate to negate. + * @returns {Function} Returns the new negated function. + * @example + * + * function isEven(n) { + * return n % 2 == 0; + * } + * + * _.filter([1, 2, 3, 4, 5, 6], _.negate(isEven)); + * // => [1, 3, 5] + */ + function negate(predicate) { + if (typeof predicate != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return function() { + var args = arguments; + switch (args.length) { + case 0: return !predicate.call(this); + case 1: return !predicate.call(this, args[0]); + case 2: return !predicate.call(this, args[0], args[1]); + case 3: return !predicate.call(this, args[0], args[1], args[2]); + } + return !predicate.apply(this, args); + }; + } + /** + * Creates a function that is restricted to invoking `func` once. Repeat calls + * to the function return the value of the first invocation. The `func` is + * invoked with the `this` binding and arguments of the created function. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var initialize = _.once(createApplication); + * initialize(); + * initialize(); + * // => `createApplication` is invoked once + */ + function once(func) { + return before(2, func); + } -/***/ }), + /** + * Creates a function that invokes `func` with its arguments transformed. + * + * @static + * @since 4.0.0 + * @memberOf _ + * @category Function + * @param {Function} func The function to wrap. + * @param {...(Function|Function[])} [transforms=[_.identity]] + * The argument transforms. + * @returns {Function} Returns the new function. + * @example + * + * function doubled(n) { + * return n * 2; + * } + * + * function square(n) { + * return n * n; + * } + * + * var func = _.overArgs(function(x, y) { + * return [x, y]; + * }, [square, doubled]); + * + * func(9, 3); + * // => [81, 6] + * + * func(10, 5); + * // => [100, 10] + */ + var overArgs = castRest(function(func, transforms) { + transforms = (transforms.length == 1 && isArray(transforms[0])) + ? arrayMap(transforms[0], baseUnary(getIteratee())) + : arrayMap(baseFlatten(transforms, 1), baseUnary(getIteratee())); -/***/ 97344: -/***/ ((module) => { + var funcsLength = transforms.length; + return baseRest(function(args) { + var index = -1, + length = nativeMin(args.length, funcsLength); -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -const SEMVER_SPEC_VERSION = '2.0.0' + while (++index < length) { + args[index] = transforms[index].call(this, args[index]); + } + return apply(func, this, args); + }); + }); -const MAX_LENGTH = 256 -const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || -/* istanbul ignore next */ 9007199254740991 + /** + * Creates a function that invokes `func` with `partials` prepended to the + * arguments it receives. This method is like `_.bind` except it does **not** + * alter the `this` binding. 
+ * + * The `_.partial.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * **Note:** This method doesn't set the "length" property of partially + * applied functions. + * + * @static + * @memberOf _ + * @since 0.2.0 + * @category Function + * @param {Function} func The function to partially apply arguments to. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new partially applied function. + * @example + * + * function greet(greeting, name) { + * return greeting + ' ' + name; + * } + * + * var sayHelloTo = _.partial(greet, 'hello'); + * sayHelloTo('fred'); + * // => 'hello fred' + * + * // Partially applied with placeholders. + * var greetFred = _.partial(greet, _, 'fred'); + * greetFred('hi'); + * // => 'hi fred' + */ + var partial = baseRest(function(func, partials) { + var holders = replaceHolders(partials, getHolder(partial)); + return createWrap(func, WRAP_PARTIAL_FLAG, undefined, partials, holders); + }); -// Max safe segment length for coercion. -const MAX_SAFE_COMPONENT_LENGTH = 16 + /** + * This method is like `_.partial` except that partially applied arguments + * are appended to the arguments it receives. + * + * The `_.partialRight.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * **Note:** This method doesn't set the "length" property of partially + * applied functions. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Function + * @param {Function} func The function to partially apply arguments to. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new partially applied function. + * @example + * + * function greet(greeting, name) { + * return greeting + ' ' + name; + * } + * + * var greetFred = _.partialRight(greet, 'fred'); + * greetFred('hi'); + * // => 'hi fred' + * + * // Partially applied with placeholders. + * var sayHelloTo = _.partialRight(greet, 'hello', _); + * sayHelloTo('fred'); + * // => 'hello fred' + */ + var partialRight = baseRest(function(func, partials) { + var holders = replaceHolders(partials, getHolder(partialRight)); + return createWrap(func, WRAP_PARTIAL_RIGHT_FLAG, undefined, partials, holders); + }); -const RELEASE_TYPES = [ - 'major', - 'premajor', - 'minor', - 'preminor', - 'patch', - 'prepatch', - 'prerelease', -] + /** + * Creates a function that invokes `func` with arguments arranged according + * to the specified `indexes` where the argument value at the first index is + * provided as the first argument, the argument value at the second index is + * provided as the second argument, and so on. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to rearrange arguments for. + * @param {...(number|number[])} indexes The arranged argument indexes. + * @returns {Function} Returns the new function. 
+ * @example + * + * var rearged = _.rearg(function(a, b, c) { + * return [a, b, c]; + * }, [2, 0, 1]); + * + * rearged('b', 'c', 'a') + * // => ['a', 'b', 'c'] + */ + var rearg = flatRest(function(func, indexes) { + return createWrap(func, WRAP_REARG_FLAG, undefined, undefined, undefined, indexes); + }); -module.exports = { - MAX_LENGTH, - MAX_SAFE_COMPONENT_LENGTH, - MAX_SAFE_INTEGER, - RELEASE_TYPES, - SEMVER_SPEC_VERSION, - FLAG_INCLUDE_PRERELEASE: 0b001, - FLAG_LOOSE: 0b010, -} + /** + * Creates a function that invokes `func` with the `this` binding of the + * created function and arguments from `start` and beyond provided as + * an array. + * + * **Note:** This method is based on the + * [rest parameter](https://mdn.io/rest_parameters). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + * @example + * + * var say = _.rest(function(what, names) { + * return what + ' ' + _.initial(names).join(', ') + + * (_.size(names) > 1 ? ', & ' : '') + _.last(names); + * }); + * + * say('hello', 'fred', 'barney', 'pebbles'); + * // => 'hello fred, barney, & pebbles' + */ + function rest(func, start) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + start = start === undefined ? start : toInteger(start); + return baseRest(func, start); + } + /** + * Creates a function that invokes `func` with the `this` binding of the + * create function and an array of arguments much like + * [`Function#apply`](http://www.ecma-international.org/ecma-262/7.0/#sec-function.prototype.apply). + * + * **Note:** This method is based on the + * [spread operator](https://mdn.io/spread_operator). + * + * @static + * @memberOf _ + * @since 3.2.0 + * @category Function + * @param {Function} func The function to spread arguments over. + * @param {number} [start=0] The start position of the spread. + * @returns {Function} Returns the new function. + * @example + * + * var say = _.spread(function(who, what) { + * return who + ' says ' + what; + * }); + * + * say(['fred', 'hello']); + * // => 'fred says hello' + * + * var numbers = Promise.all([ + * Promise.resolve(40), + * Promise.resolve(36) + * ]); + * + * numbers.then(_.spread(function(x, y) { + * return x + y; + * })); + * // => a Promise of 76 + */ + function spread(func, start) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + start = start == null ? 0 : nativeMax(toInteger(start), 0); + return baseRest(function(args) { + var array = args[start], + otherArgs = castSlice(args, 0, start); -/***/ }), + if (array) { + arrayPush(otherArgs, array); + } + return apply(func, this, otherArgs); + }); + } -/***/ 22935: -/***/ ((module) => { + /** + * Creates a throttled function that only invokes `func` at most once per + * every `wait` milliseconds. The throttled function comes with a `cancel` + * method to cancel delayed `func` invocations and a `flush` method to + * immediately invoke them. Provide `options` to indicate whether `func` + * should be invoked on the leading and/or trailing edge of the `wait` + * timeout. The `func` is invoked with the last arguments provided to the + * throttled function. Subsequent calls to the throttled function return the + * result of the last `func` invocation. 
+ * + * **Note:** If `leading` and `trailing` options are `true`, `func` is + * invoked on the trailing edge of the timeout only if the throttled function + * is invoked more than once during the `wait` timeout. + * + * If `wait` is `0` and `leading` is `false`, `func` invocation is deferred + * until to the next tick, similar to `setTimeout` with a timeout of `0`. + * + * See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/) + * for details over the differences between `_.throttle` and `_.debounce`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to throttle. + * @param {number} [wait=0] The number of milliseconds to throttle invocations to. + * @param {Object} [options={}] The options object. + * @param {boolean} [options.leading=true] + * Specify invoking on the leading edge of the timeout. + * @param {boolean} [options.trailing=true] + * Specify invoking on the trailing edge of the timeout. + * @returns {Function} Returns the new throttled function. + * @example + * + * // Avoid excessively updating the position while scrolling. + * jQuery(window).on('scroll', _.throttle(updatePosition, 100)); + * + * // Invoke `renewToken` when the click event is fired, but not more than once every 5 minutes. + * var throttled = _.throttle(renewToken, 300000, { 'trailing': false }); + * jQuery(element).on('click', throttled); + * + * // Cancel the trailing throttled invocation. + * jQuery(window).on('popstate', throttled.cancel); + */ + function throttle(func, wait, options) { + var leading = true, + trailing = true; -const debug = ( - typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG) -) ? (...args) => console.error('SEMVER', ...args) - : () => {} + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + if (isObject(options)) { + leading = 'leading' in options ? !!options.leading : leading; + trailing = 'trailing' in options ? !!options.trailing : trailing; + } + return debounce(func, wait, { + 'leading': leading, + 'maxWait': wait, + 'trailing': trailing + }); + } -module.exports = debug + /** + * Creates a function that accepts up to one argument, ignoring any + * additional arguments. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to cap arguments for. + * @returns {Function} Returns the new capped function. + * @example + * + * _.map(['6', '8', '10'], _.unary(parseInt)); + * // => [6, 8, 10] + */ + function unary(func) { + return ary(func, 1); + } + /** + * Creates a function that provides `value` to `wrapper` as its first + * argument. Any additional arguments provided to the function are appended + * to those provided to the `wrapper`. The wrapper is invoked with the `this` + * binding of the created function. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {*} value The value to wrap. + * @param {Function} [wrapper=identity] The wrapper function. + * @returns {Function} Returns the new function. + * @example + * + * var p = _.wrap(_.escape, function(func, text) { + * return '

<p>' + func(text) + '</p>'; + * }); + * + * p('fred, barney, & pebbles'); + * // => '<p>fred, barney, &amp; pebbles</p>

' + */ + function wrap(value, wrapper) { + return partial(castFunction(wrapper), value); + } -/***/ }), + /*------------------------------------------------------------------------*/ -/***/ 22179: -/***/ ((module) => { + /** + * Casts `value` as an array if it's not one. + * + * @static + * @memberOf _ + * @since 4.4.0 + * @category Lang + * @param {*} value The value to inspect. + * @returns {Array} Returns the cast array. + * @example + * + * _.castArray(1); + * // => [1] + * + * _.castArray({ 'a': 1 }); + * // => [{ 'a': 1 }] + * + * _.castArray('abc'); + * // => ['abc'] + * + * _.castArray(null); + * // => [null] + * + * _.castArray(undefined); + * // => [undefined] + * + * _.castArray(); + * // => [] + * + * var array = [1, 2, 3]; + * console.log(_.castArray(array) === array); + * // => true + */ + function castArray() { + if (!arguments.length) { + return []; + } + var value = arguments[0]; + return isArray(value) ? value : [value]; + } -const numeric = /^[0-9]+$/ -const compareIdentifiers = (a, b) => { - const anum = numeric.test(a) - const bnum = numeric.test(b) + /** + * Creates a shallow clone of `value`. + * + * **Note:** This method is loosely based on the + * [structured clone algorithm](https://mdn.io/Structured_clone_algorithm) + * and supports cloning arrays, array buffers, booleans, date objects, maps, + * numbers, `Object` objects, regexes, sets, strings, symbols, and typed + * arrays. The own enumerable properties of `arguments` objects are cloned + * as plain objects. An empty object is returned for uncloneable values such + * as error objects, functions, DOM nodes, and WeakMaps. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to clone. + * @returns {*} Returns the cloned value. + * @see _.cloneDeep + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var shallow = _.clone(objects); + * console.log(shallow[0] === objects[0]); + * // => true + */ + function clone(value) { + return baseClone(value, CLONE_SYMBOLS_FLAG); + } - if (anum && bnum) { - a = +a - b = +b - } + /** + * This method is like `_.clone` except that it accepts `customizer` which + * is invoked to produce the cloned value. If `customizer` returns `undefined`, + * cloning is handled by the method instead. The `customizer` is invoked with + * up to four arguments; (value [, index|key, object, stack]). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to clone. + * @param {Function} [customizer] The function to customize cloning. + * @returns {*} Returns the cloned value. + * @see _.cloneDeepWith + * @example + * + * function customizer(value) { + * if (_.isElement(value)) { + * return value.cloneNode(false); + * } + * } + * + * var el = _.cloneWith(document.body, customizer); + * + * console.log(el === document.body); + * // => false + * console.log(el.nodeName); + * // => 'BODY' + * console.log(el.childNodes.length); + * // => 0 + */ + function cloneWith(value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseClone(value, CLONE_SYMBOLS_FLAG, customizer); + } - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 -} + /** + * This method is like `_.clone` except that it recursively clones `value`. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Lang + * @param {*} value The value to recursively clone. + * @returns {*} Returns the deep cloned value. 
+ * @see _.clone + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var deep = _.cloneDeep(objects); + * console.log(deep[0] === objects[0]); + * // => false + */ + function cloneDeep(value) { + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG); + } -const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + /** + * This method is like `_.cloneWith` except that it recursively clones `value`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to recursively clone. + * @param {Function} [customizer] The function to customize cloning. + * @returns {*} Returns the deep cloned value. + * @see _.cloneWith + * @example + * + * function customizer(value) { + * if (_.isElement(value)) { + * return value.cloneNode(true); + * } + * } + * + * var el = _.cloneDeepWith(document.body, customizer); + * + * console.log(el === document.body); + * // => false + * console.log(el.nodeName); + * // => 'BODY' + * console.log(el.childNodes.length); + * // => 20 + */ + function cloneDeepWith(value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG, customizer); + } -module.exports = { - compareIdentifiers, - rcompareIdentifiers, -} + /** + * Checks if `object` conforms to `source` by invoking the predicate + * properties of `source` with the corresponding property values of `object`. + * + * **Note:** This method is equivalent to `_.conforms` when `source` is + * partially applied. + * + * @static + * @memberOf _ + * @since 4.14.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property predicates to conform to. + * @returns {boolean} Returns `true` if `object` conforms, else `false`. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * + * _.conformsTo(object, { 'b': function(n) { return n > 1; } }); + * // => true + * + * _.conformsTo(object, { 'b': function(n) { return n > 2; } }); + * // => false + */ + function conformsTo(object, source) { + return source == null || baseConformsTo(object, source, keys(source)); + } + /** + * Performs a + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * comparison between two values to determine if they are equivalent. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.eq(object, object); + * // => true + * + * _.eq(object, other); + * // => false + * + * _.eq('a', 'a'); + * // => true + * + * _.eq('a', Object('a')); + * // => false + * + * _.eq(NaN, NaN); + * // => true + */ + function eq(value, other) { + return value === other || (value !== value && other !== other); + } -/***/ }), + /** + * Checks if `value` is greater than `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. 
+ * @see _.lt + * @example + * + * _.gt(3, 1); + * // => true + * + * _.gt(3, 3); + * // => false + * + * _.gt(1, 3); + * // => false + */ + var gt = createRelationalOperation(baseGt); -/***/ 85185: -/***/ ((module) => { + /** + * Checks if `value` is greater than or equal to `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than or equal to + * `other`, else `false`. + * @see _.lte + * @example + * + * _.gte(3, 1); + * // => true + * + * _.gte(3, 3); + * // => true + * + * _.gte(1, 3); + * // => false + */ + var gte = createRelationalOperation(function(value, other) { + return value >= other; + }); -// parse out just the options we care about -const looseOption = Object.freeze({ loose: true }) -const emptyOpts = Object.freeze({ }) -const parseOptions = options => { - if (!options) { - return emptyOpts - } + /** + * Checks if `value` is likely an `arguments` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. + * @example + * + * _.isArguments(function() { return arguments; }()); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ + var isArguments = baseIsArguments(function() { return arguments; }()) ? baseIsArguments : function(value) { + return isObjectLike(value) && hasOwnProperty.call(value, 'callee') && + !propertyIsEnumerable.call(value, 'callee'); + }; - if (typeof options !== 'object') { - return looseOption - } + /** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ + var isArray = Array.isArray; - return options -} -module.exports = parseOptions + /** + * Checks if `value` is classified as an `ArrayBuffer` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array buffer, else `false`. + * @example + * + * _.isArrayBuffer(new ArrayBuffer(2)); + * // => true + * + * _.isArrayBuffer(new Array(2)); + * // => false + */ + var isArrayBuffer = nodeIsArrayBuffer ? baseUnary(nodeIsArrayBuffer) : baseIsArrayBuffer; + /** + * Checks if `value` is array-like. A value is considered array-like if it's + * not a function and has a `value.length` that's an integer greater than or + * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is array-like, else `false`. 
+ * @example + * + * _.isArrayLike([1, 2, 3]); + * // => true + * + * _.isArrayLike(document.body.children); + * // => true + * + * _.isArrayLike('abc'); + * // => true + * + * _.isArrayLike(_.noop); + * // => false + */ + function isArrayLike(value) { + return value != null && isLength(value.length) && !isFunction(value); + } -/***/ }), + /** + * This method is like `_.isArrayLike` except that it also checks if `value` + * is an object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array-like object, + * else `false`. + * @example + * + * _.isArrayLikeObject([1, 2, 3]); + * // => true + * + * _.isArrayLikeObject(document.body.children); + * // => true + * + * _.isArrayLikeObject('abc'); + * // => false + * + * _.isArrayLikeObject(_.noop); + * // => false + */ + function isArrayLikeObject(value) { + return isObjectLike(value) && isArrayLike(value); + } -/***/ 3682: -/***/ ((module, exports, __nccwpck_require__) => { + /** + * Checks if `value` is classified as a boolean primitive or object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a boolean, else `false`. + * @example + * + * _.isBoolean(false); + * // => true + * + * _.isBoolean(null); + * // => false + */ + function isBoolean(value) { + return value === true || value === false || + (isObjectLike(value) && baseGetTag(value) == boolTag); + } -const { MAX_SAFE_COMPONENT_LENGTH } = __nccwpck_require__(97344) -const debug = __nccwpck_require__(22935) -exports = module.exports = {} + /** + * Checks if `value` is a buffer. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. + * @example + * + * _.isBuffer(new Buffer(2)); + * // => true + * + * _.isBuffer(new Uint8Array(2)); + * // => false + */ + var isBuffer = nativeIsBuffer || stubFalse; -// The actual regexps go on exports.re -const re = exports.re = [] -const src = exports.src = [] -const t = exports.t = {} -let R = 0 + /** + * Checks if `value` is classified as a `Date` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. + * @example + * + * _.isDate(new Date); + * // => true + * + * _.isDate('Mon April 23 2012'); + * // => false + */ + var isDate = nodeIsDate ? baseUnary(nodeIsDate) : baseIsDate; -const createToken = (name, value, isGlobal) => { - const index = R++ - debug(name, index, value) - t[name] = index - src[index] = value - re[index] = new RegExp(value, isGlobal ? 'g' : undefined) -} + /** + * Checks if `value` is likely a DOM element. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a DOM element, else `false`. + * @example + * + * _.isElement(document.body); + * // => true + * + * _.isElement(''); + * // => false + */ + function isElement(value) { + return isObjectLike(value) && value.nodeType === 1 && !isPlainObject(value); + } -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. + /** + * Checks if `value` is an empty object, collection, map, or set. 
+ * + * Objects are considered empty if they have no own enumerable string keyed + * properties. + * + * Array-like values such as `arguments` objects, arrays, buffers, strings, or + * jQuery-like collections are considered empty if they have a `length` of `0`. + * Similarly, maps and sets are considered empty if they have a `size` of `0`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is empty, else `false`. + * @example + * + * _.isEmpty(null); + * // => true + * + * _.isEmpty(true); + * // => true + * + * _.isEmpty(1); + * // => true + * + * _.isEmpty([1, 2, 3]); + * // => false + * + * _.isEmpty({ 'a': 1 }); + * // => false + */ + function isEmpty(value) { + if (value == null) { + return true; + } + if (isArrayLike(value) && + (isArray(value) || typeof value == 'string' || typeof value.splice == 'function' || + isBuffer(value) || isTypedArray(value) || isArguments(value))) { + return !value.length; + } + var tag = getTag(value); + if (tag == mapTag || tag == setTag) { + return !value.size; + } + if (isPrototype(value)) { + return !baseKeys(value).length; + } + for (var key in value) { + if (hasOwnProperty.call(value, key)) { + return false; + } + } + return true; + } -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. + /** + * Performs a deep comparison between two values to determine if they are + * equivalent. + * + * **Note:** This method supports comparing arrays, array buffers, booleans, + * date objects, error objects, maps, numbers, `Object` objects, regexes, + * sets, strings, symbols, and typed arrays. `Object` objects are compared + * by their own, not inherited, enumerable properties. Functions and DOM + * nodes are compared by strict equality, i.e. `===`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.isEqual(object, other); + * // => true + * + * object === other; + * // => false + */ + function isEqual(value, other) { + return baseIsEqual(value, other); + } -createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') -createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+') + /** + * This method is like `_.isEqual` except that it accepts `customizer` which + * is invoked to compare values. If `customizer` returns `undefined`, comparisons + * are handled by the method instead. The `customizer` is invoked with up to + * six arguments: (objValue, othValue [, index|key, object, other, stack]). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. 
+ * @example + * + * function isGreeting(value) { + * return /^h(?:i|ello)$/.test(value); + * } + * + * function customizer(objValue, othValue) { + * if (isGreeting(objValue) && isGreeting(othValue)) { + * return true; + * } + * } + * + * var array = ['hello', 'goodbye']; + * var other = ['hi', 'goodbye']; + * + * _.isEqualWith(array, other, customizer); + * // => true + */ + function isEqualWith(value, other, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + var result = customizer ? customizer(value, other) : undefined; + return result === undefined ? baseIsEqual(value, other, undefined, customizer) : !!result; + } -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. + /** + * Checks if `value` is an `Error`, `EvalError`, `RangeError`, `ReferenceError`, + * `SyntaxError`, `TypeError`, or `URIError` object. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an error object, else `false`. + * @example + * + * _.isError(new Error); + * // => true + * + * _.isError(Error); + * // => false + */ + function isError(value) { + if (!isObjectLike(value)) { + return false; + } + var tag = baseGetTag(value); + return tag == errorTag || tag == domExcTag || + (typeof value.message == 'string' && typeof value.name == 'string' && !isPlainObject(value)); + } -createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*') + /** + * Checks if `value` is a finite primitive number. + * + * **Note:** This method is based on + * [`Number.isFinite`](https://mdn.io/Number/isFinite). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a finite number, else `false`. + * @example + * + * _.isFinite(3); + * // => true + * + * _.isFinite(Number.MIN_VALUE); + * // => true + * + * _.isFinite(Infinity); + * // => false + * + * _.isFinite('3'); + * // => false + */ + function isFinite(value) { + return typeof value == 'number' && nativeIsFinite(value); + } -// ## Main Version -// Three dot-separated numeric identifiers. + /** + * Checks if `value` is classified as a `Function` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a function, else `false`. + * @example + * + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false + */ + function isFunction(value) { + if (!isObject(value)) { + return false; + } + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 9 which returns 'object' for typed arrays and other constructors. + var tag = baseGetTag(value); + return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; + } -createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + - `(${src[t.NUMERICIDENTIFIER]})\\.` + - `(${src[t.NUMERICIDENTIFIER]})`) + /** + * Checks if `value` is an integer. + * + * **Note:** This method is based on + * [`Number.isInteger`](https://mdn.io/Number/isInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an integer, else `false`. 
+ * @example + * + * _.isInteger(3); + * // => true + * + * _.isInteger(Number.MIN_VALUE); + * // => false + * + * _.isInteger(Infinity); + * // => false + * + * _.isInteger('3'); + * // => false + */ + function isInteger(value) { + return typeof value == 'number' && value == toInteger(value); + } -createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + - `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + - `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + /** + * Checks if `value` is a valid array-like length. + * + * **Note:** This method is loosely based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. + * @example + * + * _.isLength(3); + * // => true + * + * _.isLength(Number.MIN_VALUE); + * // => false + * + * _.isLength(Infinity); + * // => false + * + * _.isLength('3'); + * // => false + */ + function isLength(value) { + return typeof value == 'number' && + value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; + } -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. + /** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ + function isObject(value) { + var type = typeof value; + return value != null && (type == 'object' || type == 'function'); + } -createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] -}|${src[t.NONNUMERICIDENTIFIER]})`) + /** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ + function isObjectLike(value) { + return value != null && typeof value == 'object'; + } -createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] -}|${src[t.NONNUMERICIDENTIFIER]})`) + /** + * Checks if `value` is classified as a `Map` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. + * @example + * + * _.isMap(new Map); + * // => true + * + * _.isMap(new WeakMap); + * // => false + */ + var isMap = nodeIsMap ? baseUnary(nodeIsMap) : baseIsMap; -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. + /** + * Performs a partial deep comparison between `object` and `source` to + * determine if `object` contains equivalent property values. 
+ * + * **Note:** This method is equivalent to `_.matches` when `source` is + * partially applied. + * + * Partial comparisons will match empty array and empty object `source` + * values against any array or object value, respectively. See `_.isEqual` + * for a list of supported value comparisons. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * + * _.isMatch(object, { 'b': 2 }); + * // => true + * + * _.isMatch(object, { 'b': 1 }); + * // => false + */ + function isMatch(object, source) { + return object === source || baseIsMatch(object, source, getMatchData(source)); + } -createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] -}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + /** + * This method is like `_.isMatch` except that it accepts `customizer` which + * is invoked to compare values. If `customizer` returns `undefined`, comparisons + * are handled by the method instead. The `customizer` is invoked with five + * arguments: (objValue, srcValue, index|key, object, source). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + * @example + * + * function isGreeting(value) { + * return /^h(?:i|ello)$/.test(value); + * } + * + * function customizer(objValue, srcValue) { + * if (isGreeting(objValue) && isGreeting(srcValue)) { + * return true; + * } + * } + * + * var object = { 'greeting': 'hello' }; + * var source = { 'greeting': 'hi' }; + * + * _.isMatchWith(object, source, customizer); + * // => true + */ + function isMatchWith(object, source, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseIsMatch(object, source, getMatchData(source), customizer); + } -createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] -}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + /** + * Checks if `value` is `NaN`. + * + * **Note:** This method is based on + * [`Number.isNaN`](https://mdn.io/Number/isNaN) and is not the same as + * global [`isNaN`](https://mdn.io/isNaN) which returns `true` for + * `undefined` and other non-number values. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + * @example + * + * _.isNaN(NaN); + * // => true + * + * _.isNaN(new Number(NaN)); + * // => true + * + * isNaN(undefined); + * // => true + * + * _.isNaN(undefined); + * // => false + */ + function isNaN(value) { + // An `NaN` primitive is the only value that is not equal to itself. + // Perform the `toStringTag` check first to avoid errors with some + // ActiveX objects in IE. + return isNumber(value) && value != +value; + } -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. + /** + * Checks if `value` is a pristine native function. + * + * **Note:** This method can't reliably detect native functions in the presence + * of the core-js package because core-js circumvents this kind of detection. 
+ * Despite multiple requests, the core-js maintainer has made it clear: any + * attempt to fix the detection will be obstructed. As a result, we're left + * with little choice but to throw an error. Unfortunately, this also affects + * packages, like [babel-polyfill](https://www.npmjs.com/package/babel-polyfill), + * which rely on core-js. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. + * @example + * + * _.isNative(Array.prototype.push); + * // => true + * + * _.isNative(_); + * // => false + */ + function isNative(value) { + if (isMaskable(value)) { + throw new Error(CORE_ERROR_TEXT); + } + return baseIsNative(value); + } -createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+') + /** + * Checks if `value` is `null`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `null`, else `false`. + * @example + * + * _.isNull(null); + * // => true + * + * _.isNull(void 0); + * // => false + */ + function isNull(value) { + return value === null; + } -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. + /** + * Checks if `value` is `null` or `undefined`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is nullish, else `false`. + * @example + * + * _.isNil(null); + * // => true + * + * _.isNil(void 0); + * // => true + * + * _.isNil(NaN); + * // => false + */ + function isNil(value) { + return value == null; + } -createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] -}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + /** + * Checks if `value` is classified as a `Number` primitive or object. + * + * **Note:** To exclude `Infinity`, `-Infinity`, and `NaN`, which are + * classified as numbers, use the `_.isFinite` method. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a number, else `false`. + * @example + * + * _.isNumber(3); + * // => true + * + * _.isNumber(Number.MIN_VALUE); + * // => true + * + * _.isNumber(Infinity); + * // => true + * + * _.isNumber('3'); + * // => false + */ + function isNumber(value) { + return typeof value == 'number' || + (isObjectLike(value) && baseGetTag(value) == numberTag); + } -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. + /** + * Checks if `value` is a plain object, that is, an object created by the + * `Object` constructor or one with a `[[Prototype]]` of `null`. + * + * @static + * @memberOf _ + * @since 0.8.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a plain object, else `false`. 
+ * @example + * + * function Foo() { + * this.a = 1; + * } + * + * _.isPlainObject(new Foo); + * // => false + * + * _.isPlainObject([1, 2, 3]); + * // => false + * + * _.isPlainObject({ 'x': 0, 'y': 0 }); + * // => true + * + * _.isPlainObject(Object.create(null)); + * // => true + */ + function isPlainObject(value) { + if (!isObjectLike(value) || baseGetTag(value) != objectTag) { + return false; + } + var proto = getPrototype(value); + if (proto === null) { + return true; + } + var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor; + return typeof Ctor == 'function' && Ctor instanceof Ctor && + funcToString.call(Ctor) == objectCtorString; + } -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. + /** + * Checks if `value` is classified as a `RegExp` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. + * @example + * + * _.isRegExp(/abc/); + * // => true + * + * _.isRegExp('/abc/'); + * // => false + */ + var isRegExp = nodeIsRegExp ? baseUnary(nodeIsRegExp) : baseIsRegExp; -createToken('FULLPLAIN', `v?${src[t.MAINVERSION] -}${src[t.PRERELEASE]}?${ - src[t.BUILD]}?`) + /** + * Checks if `value` is a safe integer. An integer is safe if it's an IEEE-754 + * double precision number which isn't the result of a rounded unsafe integer. + * + * **Note:** This method is based on + * [`Number.isSafeInteger`](https://mdn.io/Number/isSafeInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a safe integer, else `false`. + * @example + * + * _.isSafeInteger(3); + * // => true + * + * _.isSafeInteger(Number.MIN_VALUE); + * // => false + * + * _.isSafeInteger(Infinity); + * // => false + * + * _.isSafeInteger('3'); + * // => false + */ + function isSafeInteger(value) { + return isInteger(value) && value >= -MAX_SAFE_INTEGER && value <= MAX_SAFE_INTEGER; + } -createToken('FULL', `^${src[t.FULLPLAIN]}$`) + /** + * Checks if `value` is classified as a `Set` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. + * @example + * + * _.isSet(new Set); + * // => true + * + * _.isSet(new WeakSet); + * // => false + */ + var isSet = nodeIsSet ? baseUnary(nodeIsSet) : baseIsSet; -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] -}${src[t.PRERELEASELOOSE]}?${ - src[t.BUILD]}?`) + /** + * Checks if `value` is classified as a `String` primitive or object. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a string, else `false`. 
+ * @example + * + * _.isString('abc'); + * // => true + * + * _.isString(1); + * // => false + */ + function isString(value) { + return typeof value == 'string' || + (!isArray(value) && isObjectLike(value) && baseGetTag(value) == stringTag); + } -createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + /** + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. + * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false + */ + function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && baseGetTag(value) == symbolTag); + } -createToken('GTLT', '((?:<|>)?=?)') + /** + * Checks if `value` is classified as a typed array. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + * @example + * + * _.isTypedArray(new Uint8Array); + * // => true + * + * _.isTypedArray([]); + * // => false + */ + var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) -createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + /** + * Checks if `value` is `undefined`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `undefined`, else `false`. + * @example + * + * _.isUndefined(void 0); + * // => true + * + * _.isUndefined(null); + * // => false + */ + function isUndefined(value) { + return value === undefined; + } -createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + - `(?:${src[t.PRERELEASE]})?${ - src[t.BUILD]}?` + - `)?)?`) + /** + * Checks if `value` is classified as a `WeakMap` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak map, else `false`. + * @example + * + * _.isWeakMap(new WeakMap); + * // => true + * + * _.isWeakMap(new Map); + * // => false + */ + function isWeakMap(value) { + return isObjectLike(value) && getTag(value) == weakMapTag; + } -createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:${src[t.PRERELEASELOOSE]})?${ - src[t.BUILD]}?` + - `)?)?`) + /** + * Checks if `value` is classified as a `WeakSet` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak set, else `false`. 
+ * @example + * + * _.isWeakSet(new WeakSet); + * // => true + * + * _.isWeakSet(new Set); + * // => false + */ + function isWeakSet(value) { + return isObjectLike(value) && baseGetTag(value) == weakSetTag; + } -createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) -createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + /** + * Checks if `value` is less than `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. + * @see _.gt + * @example + * + * _.lt(1, 3); + * // => true + * + * _.lt(3, 3); + * // => false + * + * _.lt(3, 1); + * // => false + */ + var lt = createRelationalOperation(baseLt); -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -createToken('COERCE', `${'(^|[^\\d])' + - '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + - `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + - `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + - `(?:$|[^\\d])`) -createToken('COERCERTL', src[t.COERCE], true) + /** + * Checks if `value` is less than or equal to `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than or equal to + * `other`, else `false`. + * @see _.gte + * @example + * + * _.lte(1, 3); + * // => true + * + * _.lte(3, 3); + * // => true + * + * _.lte(3, 1); + * // => false + */ + var lte = createRelationalOperation(function(value, other) { + return value <= other; + }); -// Tilde ranges. -// Meaning is "reasonably at or greater than" -createToken('LONETILDE', '(?:~>?)') + /** + * Converts `value` to an array. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to convert. + * @returns {Array} Returns the converted array. + * @example + * + * _.toArray({ 'a': 1, 'b': 2 }); + * // => [1, 2] + * + * _.toArray('abc'); + * // => ['a', 'b', 'c'] + * + * _.toArray(1); + * // => [] + * + * _.toArray(null); + * // => [] + */ + function toArray(value) { + if (!value) { + return []; + } + if (isArrayLike(value)) { + return isString(value) ? stringToArray(value) : copyArray(value); + } + if (symIterator && value[symIterator]) { + return iteratorToArray(value[symIterator]()); + } + var tag = getTag(value), + func = tag == mapTag ? mapToArray : (tag == setTag ? setToArray : values); -createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) -exports.tildeTrimReplace = '$1~' + return func(value); + } -createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) -createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + /** + * Converts `value` to a finite number. + * + * @static + * @memberOf _ + * @since 4.12.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted number. + * @example + * + * _.toFinite(3.2); + * // => 3.2 + * + * _.toFinite(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toFinite(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toFinite('3.2'); + * // => 3.2 + */ + function toFinite(value) { + if (!value) { + return value === 0 ? value : 0; + } + value = toNumber(value); + if (value === INFINITY || value === -INFINITY) { + var sign = (value < 0 ? 
-1 : 1); + return sign * MAX_INTEGER; + } + return value === value ? value : 0; + } -// Caret ranges. -// Meaning is "at least and backwards compatible with" -createToken('LONECARET', '(?:\\^)') + /** + * Converts `value` to an integer. + * + * **Note:** This method is loosely based on + * [`ToInteger`](http://www.ecma-international.org/ecma-262/7.0/#sec-tointeger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toInteger(3.2); + * // => 3 + * + * _.toInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toInteger(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toInteger('3.2'); + * // => 3 + */ + function toInteger(value) { + var result = toFinite(value), + remainder = result % 1; -createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) -exports.caretTrimReplace = '$1^' + return result === result ? (remainder ? result - remainder : result) : 0; + } -createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) -createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + /** + * Converts `value` to an integer suitable for use as the length of an + * array-like object. + * + * **Note:** This method is based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toLength(3.2); + * // => 3 + * + * _.toLength(Number.MIN_VALUE); + * // => 0 + * + * _.toLength(Infinity); + * // => 4294967295 + * + * _.toLength('3.2'); + * // => 3 + */ + function toLength(value) { + return value ? baseClamp(toInteger(value), 0, MAX_ARRAY_LENGTH) : 0; + } -// A simple gt/lt/eq thing, or just "" to indicate "any version" -createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) -createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + /** + * Converts `value` to a number. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to process. + * @returns {number} Returns the number. + * @example + * + * _.toNumber(3.2); + * // => 3.2 + * + * _.toNumber(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toNumber(Infinity); + * // => Infinity + * + * _.toNumber('3.2'); + * // => 3.2 + */ + function toNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + if (isObject(value)) { + var other = typeof value.valueOf == 'function' ? value.valueOf() : value; + value = isObject(other) ? (other + '') : other; + } + if (typeof value != 'string') { + return value === 0 ? value : +value; + } + value = baseTrim(value); + var isBinary = reIsBinary.test(value); + return (isBinary || reIsOctal.test(value)) + ? freeParseInt(value.slice(2), isBinary ? 2 : 8) + : (reIsBadHex.test(value) ? NAN : +value); + } -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] -}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) -exports.comparatorTrimReplace = '$1$2$3' + /** + * Converts `value` to a plain object flattening inherited enumerable string + * keyed properties of `value` to own properties of the plain object. 
+ * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {Object} Returns the converted plain object. + * @example + * + * function Foo() { + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.assign({ 'a': 1 }, new Foo); + * // => { 'a': 1, 'b': 2 } + * + * _.assign({ 'a': 1 }, _.toPlainObject(new Foo)); + * // => { 'a': 1, 'b': 2, 'c': 3 } + */ + function toPlainObject(value) { + return copyObject(value, keysIn(value)); + } -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + - `\\s+-\\s+` + - `(${src[t.XRANGEPLAIN]})` + - `\\s*$`) + /** + * Converts `value` to a safe integer. A safe integer can be compared and + * represented correctly. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toSafeInteger(3.2); + * // => 3 + * + * _.toSafeInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toSafeInteger(Infinity); + * // => 9007199254740991 + * + * _.toSafeInteger('3.2'); + * // => 3 + */ + function toSafeInteger(value) { + return value + ? baseClamp(toInteger(value), -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER) + : (value === 0 ? value : 0); + } -createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + - `\\s+-\\s+` + - `(${src[t.XRANGEPLAINLOOSE]})` + - `\\s*$`) + /** + * Converts `value` to a string. An empty string is returned for `null` + * and `undefined` values. The sign of `-0` is preserved. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.toString(null); + * // => '' + * + * _.toString(-0); + * // => '-0' + * + * _.toString([1, 2, 3]); + * // => '1,2,3' + */ + function toString(value) { + return value == null ? '' : baseToString(value); + } -// Star ranges basically just allow anything at all. -createToken('STAR', '(<|>)?=?\\s*\\*') -// >=0.0.0 is like a star -createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') -createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') + /*------------------------------------------------------------------------*/ + /** + * Assigns own enumerable string keyed properties of source objects to the + * destination object. Source objects are applied from left to right. + * Subsequent sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object` and is loosely based on + * [`Object.assign`](https://mdn.io/Object/assign). + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. 
+ * @see _.assignIn + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assign({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'c': 3 } + */ + var assign = createAssigner(function(object, source) { + if (isPrototype(source) || isArrayLike(source)) { + copyObject(source, keys(source), object); + return; + } + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + assignValue(object, key, source[key]); + } + } + }); -/***/ }), + /** + * This method is like `_.assign` except that it iterates over own and + * inherited source properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extend + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assign + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assignIn({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'b': 2, 'c': 3, 'd': 4 } + */ + var assignIn = createAssigner(function(object, source) { + copyObject(source, keysIn(source), object); + }); -/***/ 67970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * This method is like `_.assignIn` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extendWith + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignInWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var assignInWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keysIn(source), object, customizer); + }); -// Determine if version is greater than all the versions possible in the range. -const outside = __nccwpck_require__(8629) -const gtr = (version, range, options) => outside(version, range, '>', options) -module.exports = gtr + /** + * This method is like `_.assign` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. 
+ * @see _.assignInWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var assignWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keys(source), object, customizer); + }); + /** + * Creates an array of values corresponding to `paths` of `object`. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Array} Returns the picked values. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }, 4] }; + * + * _.at(object, ['a[0].b.c', 'a[1]']); + * // => [3, 4] + */ + var at = flatRest(baseAt); -/***/ }), + /** + * Creates an object that inherits from the `prototype` object. If a + * `properties` object is given, its own enumerable string keyed properties + * are assigned to the created object. + * + * @static + * @memberOf _ + * @since 2.3.0 + * @category Object + * @param {Object} prototype The object to inherit from. + * @param {Object} [properties] The properties to assign to the object. + * @returns {Object} Returns the new object. + * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * function Circle() { + * Shape.call(this); + * } + * + * Circle.prototype = _.create(Shape.prototype, { + * 'constructor': Circle + * }); + * + * var circle = new Circle; + * circle instanceof Circle; + * // => true + * + * circle instanceof Shape; + * // => true + */ + function create(prototype, properties) { + var result = baseCreate(prototype); + return properties == null ? result : baseAssign(result, properties); + } -/***/ 1373: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Assigns own and inherited enumerable string keyed properties of source + * objects to the destination object for all destination properties that + * resolve to `undefined`. Source objects are applied from left to right. + * Once a property is set, additional values of the same property are ignored. + * + * **Note:** This method mutates `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaultsDeep + * @example + * + * _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var defaults = baseRest(function(object, sources) { + object = Object(object); -const Range = __nccwpck_require__(34502) -const intersects = (r1, r2, options) => { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2, options) -} -module.exports = intersects + var index = -1; + var length = sources.length; + var guard = length > 2 ? 
sources[2] : undefined; + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + length = 1; + } -/***/ }), + while (++index < length) { + var source = sources[index]; + var props = keysIn(source); + var propsIndex = -1; + var propsLength = props.length; -/***/ 53270: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + while (++propsIndex < propsLength) { + var key = props[propsIndex]; + var value = object[key]; -const outside = __nccwpck_require__(8629) -// Determine if version is less than all the versions possible in the range -const ltr = (version, range, options) => outside(version, range, '<', options) -module.exports = ltr + if (value === undefined || + (eq(value, objectProto[key]) && !hasOwnProperty.call(object, key))) { + object[key] = source[key]; + } + } + } + return object; + }); -/***/ }), + /** + * This method is like `_.defaults` except that it recursively assigns + * default properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 3.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaults + * @example + * + * _.defaultsDeep({ 'a': { 'b': 2 } }, { 'a': { 'b': 1, 'c': 3 } }); + * // => { 'a': { 'b': 2, 'c': 3 } } + */ + var defaultsDeep = baseRest(function(args) { + args.push(undefined, customDefaultsMerge); + return apply(mergeWith, undefined, args); + }); -/***/ 5471: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * This method is like `_.find` except that it returns the key of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. + * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findKey(users, function(o) { return o.age < 40; }); + * // => 'barney' (iteration order is not guaranteed) + * + * // The `_.matches` iteratee shorthand. + * _.findKey(users, { 'age': 1, 'active': true }); + * // => 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. + * _.findKey(users, 'active'); + * // => 'barney' + */ + function findKey(object, predicate) { + return baseFindKey(object, getIteratee(predicate, 3), baseForOwn); + } -const SemVer = __nccwpck_require__(93402) -const Range = __nccwpck_require__(34502) + /** + * This method is like `_.findKey` except that it iterates over elements of + * a collection in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. 
+ * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findLastKey(users, function(o) { return o.age < 40; }); + * // => returns 'pebbles' assuming `_.findKey` returns 'barney' + * + * // The `_.matches` iteratee shorthand. + * _.findLastKey(users, { 'age': 36, 'active': true }); + * // => 'barney' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. + * _.findLastKey(users, 'active'); + * // => 'pebbles' + */ + function findLastKey(object, predicate) { + return baseFindKey(object, getIteratee(predicate, 3), baseForOwnRight); + } -const maxSatisfying = (versions, range, options) => { - let max = null - let maxSV = null - let rangeObj = null - try { - rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach((v) => { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } + /** + * Iterates over own and inherited enumerable string keyed properties of an + * object and invokes `iteratee` for each property. The iteratee is invoked + * with three arguments: (value, key, object). Iteratee functions may exit + * iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forInRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forIn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a', 'b', then 'c' (iteration order is not guaranteed). + */ + function forIn(object, iteratee) { + return object == null + ? object + : baseFor(object, getIteratee(iteratee, 3), keysIn); } - }) - return max -} -module.exports = maxSatisfying - - -/***/ }), - -/***/ 65356: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const SemVer = __nccwpck_require__(93402) -const Range = __nccwpck_require__(34502) -const minSatisfying = (versions, range, options) => { - let min = null - let minSV = null - let rangeObj = null - try { - rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach((v) => { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } + /** + * This method is like `_.forIn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forIn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forInRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'c', 'b', then 'a' assuming `_.forIn` logs 'a', 'b', then 'c'. + */ + function forInRight(object, iteratee) { + return object == null + ? 
object + : baseForRight(object, getIteratee(iteratee, 3), keysIn); } - }) - return min -} -module.exports = minSatisfying + /** + * Iterates over own enumerable string keyed properties of an object and + * invokes `iteratee` for each property. The iteratee is invoked with three + * arguments: (value, key, object). Iteratee functions may exit iteration + * early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forOwnRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ + function forOwn(object, iteratee) { + return object && baseForOwn(object, getIteratee(iteratee, 3)); + } -/***/ }), - -/***/ 7825: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * This method is like `_.forOwn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forOwn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwnRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'b' then 'a' assuming `_.forOwn` logs 'a' then 'b'. + */ + function forOwnRight(object, iteratee) { + return object && baseForOwnRight(object, getIteratee(iteratee, 3)); + } -const SemVer = __nccwpck_require__(93402) -const Range = __nccwpck_require__(34502) -const gt = __nccwpck_require__(47040) + /** + * Creates an array of function property names from own enumerable properties + * of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functionsIn + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functions(new Foo); + * // => ['a', 'b'] + */ + function functions(object) { + return object == null ? [] : baseFunctions(object, keys(object)); + } -const minVersion = (range, loose) => { - range = new Range(range, loose) + /** + * Creates an array of function property names from own and inherited + * enumerable properties of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functions + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functionsIn(new Foo); + * // => ['a', 'b', 'c'] + */ + function functionsIn(object) { + return object == null ? [] : baseFunctions(object, keysIn(object)); + } - let minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } + /** + * Gets the value at `path` of `object`. If the resolved value is + * `undefined`, the `defaultValue` is returned in its place. 
+ * + * @static + * @memberOf _ + * @since 3.7.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to get. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.get(object, 'a[0].b.c'); + * // => 3 + * + * _.get(object, ['a', '0', 'b', 'c']); + * // => 3 + * + * _.get(object, 'a.b.c', 'default'); + * // => 'default' + */ + function get(object, path, defaultValue) { + var result = object == null ? undefined : baseGet(object, path); + return result === undefined ? defaultValue : result; + } - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } + /** + * Checks if `path` is a direct property of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + * @example + * + * var object = { 'a': { 'b': 2 } }; + * var other = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.has(object, 'a'); + * // => true + * + * _.has(object, 'a.b'); + * // => true + * + * _.has(object, ['a', 'b']); + * // => true + * + * _.has(other, 'a'); + * // => false + */ + function has(object, path) { + return object != null && hasPath(object, path, baseHas); + } - minver = null - for (let i = 0; i < range.set.length; ++i) { - const comparators = range.set[i] + /** + * Checks if `path` is a direct or inherited property of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + * @example + * + * var object = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.hasIn(object, 'a'); + * // => true + * + * _.hasIn(object, 'a.b'); + * // => true + * + * _.hasIn(object, ['a', 'b']); + * // => true + * + * _.hasIn(object, 'b'); + * // => false + */ + function hasIn(object, path) { + return object != null && hasPath(object, path, baseHasIn); + } - let setMin = null - comparators.forEach((comparator) => { - // Clone to avoid manipulating the comparator's semver object. - const compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!setMin || gt(compver, setMin)) { - setMin = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error(`Unexpected operation: ${comparator.operator}`) + /** + * Creates an object composed of the inverted keys and values of `object`. + * If `object` contains duplicate values, subsequent values overwrite + * property assignments of previous values. + * + * @static + * @memberOf _ + * @since 0.7.0 + * @category Object + * @param {Object} object The object to invert. + * @returns {Object} Returns the new inverted object. 
+ * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invert(object); + * // => { '1': 'c', '2': 'b' } + */ + var invert = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); } - }) - if (setMin && (!minver || gt(minver, setMin))) { - minver = setMin - } - } - if (minver && range.test(minver)) { - return minver - } + result[value] = key; + }, constant(identity)); - return null -} -module.exports = minVersion + /** + * This method is like `_.invert` except that the inverted object is generated + * from the results of running each element of `object` thru `iteratee`. The + * corresponding inverted value of each inverted key is an array of keys + * responsible for generating the inverted value. The iteratee is invoked + * with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.1.0 + * @category Object + * @param {Object} object The object to invert. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Object} Returns the new inverted object. + * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invertBy(object); + * // => { '1': ['a', 'c'], '2': ['b'] } + * + * _.invertBy(object, function(value) { + * return 'group' + value; + * }); + * // => { 'group1': ['a', 'c'], 'group2': ['b'] } + */ + var invertBy = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); + } + if (hasOwnProperty.call(result, value)) { + result[value].push(key); + } else { + result[value] = [key]; + } + }, getIteratee); -/***/ }), + /** + * Invokes the method at `path` of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the method to invoke. + * @param {...*} [args] The arguments to invoke the method with. + * @returns {*} Returns the result of the invoked method. + * @example + * + * var object = { 'a': [{ 'b': { 'c': [1, 2, 3, 4] } }] }; + * + * _.invoke(object, 'a[0].b.c.slice', 1, 3); + * // => [2, 3] + */ + var invoke = baseRest(baseInvoke); -/***/ 8629: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Creates an array of the own enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. See the + * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * for more details. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keys(new Foo); + * // => ['a', 'b'] (iteration order is not guaranteed) + * + * _.keys('hi'); + * // => ['0', '1'] + */ + function keys(object) { + return isArrayLike(object) ? arrayLikeKeys(object) : baseKeys(object); + } -const SemVer = __nccwpck_require__(93402) -const Comparator = __nccwpck_require__(24644) -const { ANY } = Comparator -const Range = __nccwpck_require__(34502) -const satisfies = __nccwpck_require__(45056) -const gt = __nccwpck_require__(47040) -const lt = __nccwpck_require__(16054) -const lte = __nccwpck_require__(79387) -const gte = __nccwpck_require__(97445) + /** + * Creates an array of the own and inherited enumerable property names of `object`. 
+ * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keysIn(new Foo); + * // => ['a', 'b', 'c'] (iteration order is not guaranteed) + */ + function keysIn(object) { + return isArrayLike(object) ? arrayLikeKeys(object, true) : baseKeysIn(object); + } -const outside = (version, range, hilo, options) => { - version = new SemVer(version, options) - range = new Range(range, options) + /** + * The opposite of `_.mapValues`; this method creates an object with the + * same values as `object` and keys generated by running each own enumerable + * string keyed property of `object` thru `iteratee`. The iteratee is invoked + * with three arguments: (value, key, object). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns the new mapped object. + * @see _.mapValues + * @example + * + * _.mapKeys({ 'a': 1, 'b': 2 }, function(value, key) { + * return key + value; + * }); + * // => { 'a1': 1, 'b2': 2 } + */ + function mapKeys(object, iteratee) { + var result = {}; + iteratee = getIteratee(iteratee, 3); - let gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } + baseForOwn(object, function(value, key, object) { + baseAssignValue(result, iteratee(value, key, object), value); + }); + return result; + } - // If it satisfies the range it is not outside - if (satisfies(version, range, options)) { - return false - } + /** + * Creates an object with the same keys as `object` and values generated + * by running each own enumerable string keyed property of `object` thru + * `iteratee`. The iteratee is invoked with three arguments: + * (value, key, object). + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns the new mapped object. + * @see _.mapKeys + * @example + * + * var users = { + * 'fred': { 'user': 'fred', 'age': 40 }, + * 'pebbles': { 'user': 'pebbles', 'age': 1 } + * }; + * + * _.mapValues(users, function(o) { return o.age; }); + * // => { 'fred': 40, 'pebbles': 1 } (iteration order is not guaranteed) + * + * // The `_.property` iteratee shorthand. + * _.mapValues(users, 'age'); + * // => { 'fred': 40, 'pebbles': 1 } (iteration order is not guaranteed) + */ + function mapValues(object, iteratee) { + var result = {}; + iteratee = getIteratee(iteratee, 3); - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. 
+ baseForOwn(object, function(value, key, object) { + baseAssignValue(result, key, iteratee(value, key, object)); + }); + return result; + } - for (let i = 0; i < range.set.length; ++i) { - const comparators = range.set[i] + /** + * This method is like `_.assign` except that it recursively merges own and + * inherited enumerable string keyed properties of source objects into the + * destination object. Source properties that resolve to `undefined` are + * skipped if a destination value exists. Array and plain object properties + * are merged recursively. Other objects and value types are overridden by + * assignment. Source objects are applied from left to right. Subsequent + * sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @example + * + * var object = { + * 'a': [{ 'b': 2 }, { 'd': 4 }] + * }; + * + * var other = { + * 'a': [{ 'c': 3 }, { 'e': 5 }] + * }; + * + * _.merge(object, other); + * // => { 'a': [{ 'b': 2, 'c': 3 }, { 'd': 4, 'e': 5 }] } + */ + var merge = createAssigner(function(object, source, srcIndex) { + baseMerge(object, source, srcIndex); + }); - let high = null - let low = null + /** + * This method is like `_.merge` except that it accepts `customizer` which + * is invoked to produce the merged values of the destination and source + * properties. If `customizer` returns `undefined`, merging is handled by the + * method instead. The `customizer` is invoked with six arguments: + * (objValue, srcValue, key, object, source, stack). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} customizer The function to customize assigned values. + * @returns {Object} Returns `object`. + * @example + * + * function customizer(objValue, srcValue) { + * if (_.isArray(objValue)) { + * return objValue.concat(srcValue); + * } + * } + * + * var object = { 'a': [1], 'b': [2] }; + * var other = { 'a': [3], 'b': [4] }; + * + * _.mergeWith(object, other, customizer); + * // => { 'a': [1, 3], 'b': [2, 4] } + */ + var mergeWith = createAssigner(function(object, source, srcIndex, customizer) { + baseMerge(object, source, srcIndex, customizer); + }); - comparators.forEach((comparator) => { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') + /** + * The opposite of `_.pick`; this method creates an object composed of the + * own and inherited enumerable property paths of `object` that are not omitted. + * + * **Note:** This method is considerably slower than `_.pick`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The source object. + * @param {...(string|string[])} [paths] The property paths to omit. + * @returns {Object} Returns the new object. 
+ * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.omit(object, ['a', 'c']); + * // => { 'b': '2' } + */ + var omit = flatRest(function(object, paths) { + var result = {}; + if (object == null) { + return result; } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator + var isDeep = false; + paths = arrayMap(paths, function(path) { + path = castPath(path, object); + isDeep || (isDeep = path.length > 1); + return path; + }); + copyObject(object, getAllKeysIn(object), result); + if (isDeep) { + result = baseClone(result, CLONE_DEEP_FLAG | CLONE_FLAT_FLAG | CLONE_SYMBOLS_FLAG, customOmitClone); } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } + var length = paths.length; + while (length--) { + baseUnset(result, paths[length]); + } + return result; + }); - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false + /** + * The opposite of `_.pickBy`; this method creates an object composed of + * the own and inherited enumerable string keyed properties of `object` that + * `predicate` doesn't return truthy for. The predicate is invoked with two + * arguments: (value, key). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The source object. + * @param {Function} [predicate=_.identity] The function invoked per property. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.omitBy(object, _.isNumber); + * // => { 'b': '2' } + */ + function omitBy(object, predicate) { + return pickBy(object, negate(getIteratee(predicate))); } - } - return true -} -module.exports = outside + /** + * Creates an object composed of the picked `object` properties. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The source object. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.pick(object, ['a', 'c']); + * // => { 'a': 1, 'c': 3 } + */ + var pick = flatRest(function(object, paths) { + return object == null ? {} : basePick(object, paths); + }); + /** + * Creates an object composed of the `object` properties `predicate` returns + * truthy for. The predicate is invoked with two arguments: (value, key). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The source object. + * @param {Function} [predicate=_.identity] The function invoked per property. + * @returns {Object} Returns the new object. 
+ * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.pickBy(object, _.isNumber); + * // => { 'a': 1, 'c': 3 } + */ + function pickBy(object, predicate) { + if (object == null) { + return {}; + } + var props = arrayMap(getAllKeysIn(object), function(prop) { + return [prop]; + }); + predicate = getIteratee(predicate); + return basePickBy(object, props, function(value, path) { + return predicate(value, path[0]); + }); + } -/***/ }), + /** + * This method is like `_.get` except that if the resolved value is a + * function it's invoked with the `this` binding of its parent object and + * its result is returned. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to resolve. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c1': 3, 'c2': _.constant(4) } }] }; + * + * _.result(object, 'a[0].b.c1'); + * // => 3 + * + * _.result(object, 'a[0].b.c2'); + * // => 4 + * + * _.result(object, 'a[0].b.c3', 'default'); + * // => 'default' + * + * _.result(object, 'a[0].b.c3', _.constant('default')); + * // => 'default' + */ + function result(object, path, defaultValue) { + path = castPath(path, object); -/***/ 94336: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var index = -1, + length = path.length; -// given a set of versions and a range, create a "simplified" range -// that includes the same versions that the original range does -// If the original range is shorter than the simplified one, return that. -const satisfies = __nccwpck_require__(45056) -const compare = __nccwpck_require__(24343) -module.exports = (versions, range, options) => { - const set = [] - let first = null - let prev = null - const v = versions.sort((a, b) => compare(a, b, options)) - for (const version of v) { - const included = satisfies(version, range, options) - if (included) { - prev = version - if (!first) { - first = version + // Ensure the loop is entered when path is empty. + if (!length) { + length = 1; + object = undefined; } - } else { - if (prev) { - set.push([first, prev]) + while (++index < length) { + var value = object == null ? undefined : object[toKey(path[index])]; + if (value === undefined) { + index = length; + value = defaultValue; + } + object = isFunction(value) ? value.call(object) : value; } - prev = null - first = null + return object; } - } - if (first) { - set.push([first, null]) - } - const ranges = [] - for (const [min, max] of set) { - if (min === max) { - ranges.push(min) - } else if (!max && min === v[0]) { - ranges.push('*') - } else if (!max) { - ranges.push(`>=${min}`) - } else if (min === v[0]) { - ranges.push(`<=${max}`) - } else { - ranges.push(`${min} - ${max}`) + /** + * Sets the value at `path` of `object`. If a portion of `path` doesn't exist, + * it's created. Arrays are created for missing index properties while objects + * are created for all other missing properties. Use `_.setWith` to customize + * `path` creation. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 3.7.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @returns {Object} Returns `object`. 
+ * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.set(object, 'a[0].b.c', 4); + * console.log(object.a[0].b.c); + * // => 4 + * + * _.set(object, ['x', '0', 'y', 'z'], 5); + * console.log(object.x[0].y.z); + * // => 5 + */ + function set(object, path, value) { + return object == null ? object : baseSet(object, path, value); } - } - const simplified = ranges.join(' || ') - const original = typeof range.raw === 'string' ? range.raw : String(range) - return simplified.length < original.length ? simplified : range -} - - -/***/ }), - -/***/ 54979: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const Range = __nccwpck_require__(34502) -const Comparator = __nccwpck_require__(24644) -const { ANY } = Comparator -const satisfies = __nccwpck_require__(45056) -const compare = __nccwpck_require__(24343) + /** + * This method is like `_.set` except that it accepts `customizer` which is + * invoked to produce the objects of `path`. If `customizer` returns `undefined` + * path creation is handled by the method instead. The `customizer` is invoked + * with three arguments: (nsValue, key, nsObject). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @example + * + * var object = {}; + * + * _.setWith(object, '[0][1]', 'a', Object); + * // => { '0': { '1': 'a' } } + */ + function setWith(object, path, value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return object == null ? 
object : baseSet(object, path, value, customizer); + } -// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: -// - Every simple range `r1, r2, ...` is a null set, OR -// - Every simple range `r1, r2, ...` which is not a null set is a subset of -// some `R1, R2, ...` -// -// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: -// - If c is only the ANY comparator -// - If C is only the ANY comparator, return true -// - Else if in prerelease mode, return false -// - else replace c with `[>=0.0.0]` -// - If C is only the ANY comparator -// - if in prerelease mode, return true -// - else replace C with `[>=0.0.0]` -// - Let EQ be the set of = comparators in c -// - If EQ is more than one, return true (null set) -// - Let GT be the highest > or >= comparator in c -// - Let LT be the lowest < or <= comparator in c -// - If GT and LT, and GT.semver > LT.semver, return true (null set) -// - If any C is a = range, and GT or LT are set, return false -// - If EQ -// - If GT, and EQ does not satisfy GT, return true (null set) -// - If LT, and EQ does not satisfy LT, return true (null set) -// - If EQ satisfies every C, return true -// - Else return false -// - If GT -// - If GT.semver is lower than any > or >= comp in C, return false -// - If GT is >=, and GT.semver does not satisfy every C, return false -// - If GT.semver has a prerelease, and not in prerelease mode -// - If no C has a prerelease and the GT.semver tuple, return false -// - If LT -// - If LT.semver is greater than any < or <= comp in C, return false -// - If LT is <=, and LT.semver does not satisfy every C, return false -// - If GT.semver has a prerelease, and not in prerelease mode -// - If no C has a prerelease and the LT.semver tuple, return false -// - Else return true + /** + * Creates an array of own enumerable string keyed-value pairs for `object` + * which can be consumed by `_.fromPairs`. If `object` is a map or set, its + * entries are returned. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias entries + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the key-value pairs. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.toPairs(new Foo); + * // => [['a', 1], ['b', 2]] (iteration order is not guaranteed) + */ + var toPairs = createToPairs(keys); -const subset = (sub, dom, options = {}) => { - if (sub === dom) { - return true - } + /** + * Creates an array of own and inherited enumerable string keyed-value pairs + * for `object` which can be consumed by `_.fromPairs`. If `object` is a map + * or set, its entries are returned. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias entriesIn + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the key-value pairs. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.toPairsIn(new Foo); + * // => [['a', 1], ['b', 2], ['c', 3]] (iteration order is not guaranteed) + */ + var toPairsIn = createToPairs(keysIn); - sub = new Range(sub, options) - dom = new Range(dom, options) - let sawNonNull = false + /** + * An alternative to `_.reduce`; this method transforms `object` to a new + * `accumulator` object which is the result of running each of its own + * enumerable string keyed properties thru `iteratee`, with each invocation + * potentially mutating the `accumulator` object. 
If `accumulator` is not + * provided, a new object with the same `[[Prototype]]` will be used. The + * iteratee is invoked with four arguments: (accumulator, value, key, object). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The custom accumulator value. + * @returns {*} Returns the accumulated value. + * @example + * + * _.transform([2, 3, 4], function(result, n) { + * result.push(n *= n); + * return n % 2 == 0; + * }, []); + * // => [4, 9] + * + * _.transform({ 'a': 1, 'b': 2, 'c': 1 }, function(result, value, key) { + * (result[value] || (result[value] = [])).push(key); + * }, {}); + * // => { '1': ['a', 'c'], '2': ['b'] } + */ + function transform(object, iteratee, accumulator) { + var isArr = isArray(object), + isArrLike = isArr || isBuffer(object) || isTypedArray(object); - OUTER: for (const simpleSub of sub.set) { - for (const simpleDom of dom.set) { - const isSub = simpleSubset(simpleSub, simpleDom, options) - sawNonNull = sawNonNull || isSub !== null - if (isSub) { - continue OUTER + iteratee = getIteratee(iteratee, 4); + if (accumulator == null) { + var Ctor = object && object.constructor; + if (isArrLike) { + accumulator = isArr ? new Ctor : []; + } + else if (isObject(object)) { + accumulator = isFunction(Ctor) ? baseCreate(getPrototype(object)) : {}; + } + else { + accumulator = {}; + } } + (isArrLike ? arrayEach : baseForOwn)(object, function(value, index, object) { + return iteratee(accumulator, value, index, object); + }); + return accumulator; } - // the null set is a subset of everything, but null simple ranges in - // a complex range should be ignored. so if we saw a non-null range, - // then we know this isn't a subset, but if EVERY simple range was null, - // then it is a subset. - if (sawNonNull) { - return false - } - } - return true -} - -const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] -const minimumVersion = [new Comparator('>=0.0.0')] - -const simpleSubset = (sub, dom, options) => { - if (sub === dom) { - return true - } - if (sub.length === 1 && sub[0].semver === ANY) { - if (dom.length === 1 && dom[0].semver === ANY) { - return true - } else if (options.includePrerelease) { - sub = minimumVersionWithPreRelease - } else { - sub = minimumVersion + /** + * Removes the property at `path` of `object`. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to unset. + * @returns {boolean} Returns `true` if the property is deleted, else `false`. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 7 } }] }; + * _.unset(object, 'a[0].b.c'); + * // => true + * + * console.log(object); + * // => { 'a': [{ 'b': {} }] }; + * + * _.unset(object, ['a', '0', 'b', 'c']); + * // => true + * + * console.log(object); + * // => { 'a': [{ 'b': {} }] }; + */ + function unset(object, path) { + return object == null ? true : baseUnset(object, path); } - } - if (dom.length === 1 && dom[0].semver === ANY) { - if (options.includePrerelease) { - return true - } else { - dom = minimumVersion + /** + * This method is like `_.set` except that accepts `updater` to produce the + * value to set. Use `_.updateWith` to customize `path` creation. 
The `updater` + * is invoked with one argument: (value). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {Function} updater The function to produce the updated value. + * @returns {Object} Returns `object`. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.update(object, 'a[0].b.c', function(n) { return n * n; }); + * console.log(object.a[0].b.c); + * // => 9 + * + * _.update(object, 'x[0].y.z', function(n) { return n ? n + 1 : 0; }); + * console.log(object.x[0].y.z); + * // => 0 + */ + function update(object, path, updater) { + return object == null ? object : baseUpdate(object, path, castFunction(updater)); } - } - const eqSet = new Set() - let gt, lt - for (const c of sub) { - if (c.operator === '>' || c.operator === '>=') { - gt = higherGT(gt, c, options) - } else if (c.operator === '<' || c.operator === '<=') { - lt = lowerLT(lt, c, options) - } else { - eqSet.add(c.semver) + /** + * This method is like `_.update` except that it accepts `customizer` which is + * invoked to produce the objects of `path`. If `customizer` returns `undefined` + * path creation is handled by the method instead. The `customizer` is invoked + * with three arguments: (nsValue, key, nsObject). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {Function} updater The function to produce the updated value. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @example + * + * var object = {}; + * + * _.updateWith(object, '[0][1]', _.constant('a'), Object); + * // => { '0': { '1': 'a' } } + */ + function updateWith(object, path, updater, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return object == null ? object : baseUpdate(object, path, castFunction(updater), customizer); } - } - - if (eqSet.size > 1) { - return null - } - let gtltComp - if (gt && lt) { - gtltComp = compare(gt.semver, lt.semver, options) - if (gtltComp > 0) { - return null - } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { - return null + /** + * Creates an array of the own enumerable string keyed property values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.values(new Foo); + * // => [1, 2] (iteration order is not guaranteed) + * + * _.values('hi'); + * // => ['h', 'i'] + */ + function values(object) { + return object == null ? [] : baseValues(object, keys(object)); } - } - // will iterate one or zero times - for (const eq of eqSet) { - if (gt && !satisfies(eq, String(gt), options)) { - return null + /** + * Creates an array of the own and inherited enumerable string keyed property + * values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. 
+ * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.valuesIn(new Foo); + * // => [1, 2, 3] (iteration order is not guaranteed) + */ + function valuesIn(object) { + return object == null ? [] : baseValues(object, keysIn(object)); } - if (lt && !satisfies(eq, String(lt), options)) { - return null - } + /*------------------------------------------------------------------------*/ - for (const c of dom) { - if (!satisfies(eq, String(c), options)) { - return false + /** + * Clamps `number` within the inclusive `lower` and `upper` bounds. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Number + * @param {number} number The number to clamp. + * @param {number} [lower] The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the clamped number. + * @example + * + * _.clamp(-10, -5, 5); + * // => -5 + * + * _.clamp(10, -5, 5); + * // => 5 + */ + function clamp(number, lower, upper) { + if (upper === undefined) { + upper = lower; + lower = undefined; + } + if (upper !== undefined) { + upper = toNumber(upper); + upper = upper === upper ? upper : 0; + } + if (lower !== undefined) { + lower = toNumber(lower); + lower = lower === lower ? lower : 0; } + return baseClamp(toNumber(number), lower, upper); } - return true - } - - let higher, lower - let hasDomLT, hasDomGT - // if the subset has a prerelease, we need a comparator in the superset - // with the same tuple and a prerelease, or it's not a subset - let needDomLTPre = lt && - !options.includePrerelease && - lt.semver.prerelease.length ? lt.semver : false - let needDomGTPre = gt && - !options.includePrerelease && - gt.semver.prerelease.length ? gt.semver : false - // exception: <1.2.3-0 is the same as <1.2.3 - if (needDomLTPre && needDomLTPre.prerelease.length === 1 && - lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { - needDomLTPre = false - } + /** + * Checks if `n` is between `start` and up to, but not including, `end`. If + * `end` is not specified, it's set to `start` with `start` then set to `0`. + * If `start` is greater than `end` the params are swapped to support + * negative ranges. + * + * @static + * @memberOf _ + * @since 3.3.0 + * @category Number + * @param {number} number The number to check. + * @param {number} [start=0] The start of the range. + * @param {number} end The end of the range. + * @returns {boolean} Returns `true` if `number` is in the range, else `false`. 
+ * @see _.range, _.rangeRight + * @example + * + * _.inRange(3, 2, 4); + * // => true + * + * _.inRange(4, 8); + * // => true + * + * _.inRange(4, 2); + * // => false + * + * _.inRange(2, 2); + * // => false + * + * _.inRange(1.2, 2); + * // => true + * + * _.inRange(5.2, 4); + * // => false + * + * _.inRange(-3, -2, -6); + * // => true + */ + function inRange(number, start, end) { + start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); + } + number = toNumber(number); + return baseInRange(number, start, end); + } - for (const c of dom) { - hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' - hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' - if (gt) { - if (needDomGTPre) { - if (c.semver.prerelease && c.semver.prerelease.length && - c.semver.major === needDomGTPre.major && - c.semver.minor === needDomGTPre.minor && - c.semver.patch === needDomGTPre.patch) { - needDomGTPre = false - } + /** + * Produces a random number between the inclusive `lower` and `upper` bounds. + * If only one argument is provided a number between `0` and the given number + * is returned. If `floating` is `true`, or either `lower` or `upper` are + * floats, a floating-point number is returned instead of an integer. + * + * **Note:** JavaScript follows the IEEE-754 standard for resolving + * floating-point values which can produce unexpected results. + * + * @static + * @memberOf _ + * @since 0.7.0 + * @category Number + * @param {number} [lower=0] The lower bound. + * @param {number} [upper=1] The upper bound. + * @param {boolean} [floating] Specify returning a floating-point number. + * @returns {number} Returns the random number. + * @example + * + * _.random(0, 5); + * // => an integer between 0 and 5 + * + * _.random(5); + * // => also an integer between 0 and 5 + * + * _.random(5, true); + * // => a floating-point number between 0 and 5 + * + * _.random(1.2, 5.2); + * // => a floating-point number between 1.2 and 5.2 + */ + function random(lower, upper, floating) { + if (floating && typeof floating != 'boolean' && isIterateeCall(lower, upper, floating)) { + upper = floating = undefined; } - if (c.operator === '>' || c.operator === '>=') { - higher = higherGT(gt, c, options) - if (higher === c && higher !== gt) { - return false + if (floating === undefined) { + if (typeof upper == 'boolean') { + floating = upper; + upper = undefined; } - } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { - return false - } - } - if (lt) { - if (needDomLTPre) { - if (c.semver.prerelease && c.semver.prerelease.length && - c.semver.major === needDomLTPre.major && - c.semver.minor === needDomLTPre.minor && - c.semver.patch === needDomLTPre.patch) { - needDomLTPre = false + else if (typeof lower == 'boolean') { + floating = lower; + lower = undefined; } } - if (c.operator === '<' || c.operator === '<=') { - lower = lowerLT(lt, c, options) - if (lower === c && lower !== lt) { - return false - } - } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { - return false + if (lower === undefined && upper === undefined) { + lower = 0; + upper = 1; } - } - if (!c.operator && (lt || gt) && gtltComp !== 0) { - return false - } - } - - // if there was a < or >, and nothing in the dom, then must be false - // UNLESS it was limited by another range in the other direction. 
- // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 - if (gt && hasDomLT && !lt && gtltComp !== 0) { - return false - } - - if (lt && hasDomGT && !gt && gtltComp !== 0) { - return false - } - - // we needed a prerelease range in a specific tuple, but didn't get one - // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, - // because it includes prereleases in the 1.2.3 tuple - if (needDomGTPre || needDomLTPre) { - return false - } - - return true -} - -// >=1.2.3 is lower than >1.2.3 -const higherGT = (a, b, options) => { - if (!a) { - return b - } - const comp = compare(a.semver, b.semver, options) - return comp > 0 ? a - : comp < 0 ? b - : b.operator === '>' && a.operator === '>=' ? b - : a -} - -// <=1.2.3 is higher than <1.2.3 -const lowerLT = (a, b, options) => { - if (!a) { - return b - } - const comp = compare(a.semver, b.semver, options) - return comp < 0 ? a - : comp > 0 ? b - : b.operator === '<' && a.operator === '<=' ? b - : a -} - -module.exports = subset - - -/***/ }), - -/***/ 54676: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const Range = __nccwpck_require__(34502) - -// Mostly just for testing and legacy API reasons -const toComparators = (range, options) => - new Range(range, options).set - .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) - -module.exports = toComparators - - -/***/ }), - -/***/ 66895: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const Range = __nccwpck_require__(34502) -const validRange = (range, options) => { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} -module.exports = validRange - - -/***/ }), - -/***/ 82022: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const timespan = __nccwpck_require__(20910); -const PS_SUPPORTED = __nccwpck_require__(59085); -const validateAsymmetricKey = __nccwpck_require__(47596); -const jws = __nccwpck_require__(22597); -const {includes, isBoolean, isInteger, isNumber, isPlainObject, isString, once} = __nccwpck_require__(90250) -const { KeyObject, createSecretKey, createPrivateKey } = __nccwpck_require__(6113) - -const SUPPORTED_ALGS = ['RS256', 'RS384', 'RS512', 'ES256', 'ES384', 'ES512', 'HS256', 'HS384', 'HS512', 'none']; -if (PS_SUPPORTED) { - SUPPORTED_ALGS.splice(3, 0, 'PS256', 'PS384', 'PS512'); -} - -const sign_options_schema = { - expiresIn: { isValid: function(value) { return isInteger(value) || (isString(value) && value); }, message: '"expiresIn" should be a number of seconds or string representing a timespan' }, - notBefore: { isValid: function(value) { return isInteger(value) || (isString(value) && value); }, message: '"notBefore" should be a number of seconds or string representing a timespan' }, - audience: { isValid: function(value) { return isString(value) || Array.isArray(value); }, message: '"audience" must be a string or array' }, - algorithm: { isValid: includes.bind(null, SUPPORTED_ALGS), message: '"algorithm" must be a valid string enum value' }, - header: { isValid: isPlainObject, message: '"header" must be an object' }, - encoding: { isValid: isString, message: '"encoding" must be a string' }, - issuer: { isValid: isString, message: '"issuer" must be a string' }, - subject: { isValid: isString, message: '"subject" must be a string' }, - jwtid: { isValid: isString, message: '"jwtid" must be a string' }, - noTimestamp: { isValid: isBoolean, 
message: '"noTimestamp" must be a boolean' }, - keyid: { isValid: isString, message: '"keyid" must be a string' }, - mutatePayload: { isValid: isBoolean, message: '"mutatePayload" must be a boolean' }, - allowInsecureKeySizes: { isValid: isBoolean, message: '"allowInsecureKeySizes" must be a boolean'}, - allowInvalidAsymmetricKeyTypes: { isValid: isBoolean, message: '"allowInvalidAsymmetricKeyTypes" must be a boolean'} -}; - -const registered_claims_schema = { - iat: { isValid: isNumber, message: '"iat" should be a number of seconds' }, - exp: { isValid: isNumber, message: '"exp" should be a number of seconds' }, - nbf: { isValid: isNumber, message: '"nbf" should be a number of seconds' } -}; - -function validate(schema, allowUnknown, object, parameterName) { - if (!isPlainObject(object)) { - throw new Error('Expected "' + parameterName + '" to be a plain object.'); - } - Object.keys(object) - .forEach(function(key) { - const validator = schema[key]; - if (!validator) { - if (!allowUnknown) { - throw new Error('"' + key + '" is not allowed in "' + parameterName + '"'); + else { + lower = toFinite(lower); + if (upper === undefined) { + upper = lower; + lower = 0; + } else { + upper = toFinite(upper); } - return; } - if (!validator.isValid(object[key])) { - throw new Error(validator.message); + if (lower > upper) { + var temp = lower; + lower = upper; + upper = temp; } - }); -} - -function validateOptions(options) { - return validate(sign_options_schema, false, options, 'options'); -} + if (floating || lower % 1 || upper % 1) { + var rand = nativeRandom(); + return nativeMin(lower + (rand * (upper - lower + freeParseFloat('1e-' + ((rand + '').length - 1)))), upper); + } + return baseRandom(lower, upper); + } -function validatePayload(payload) { - return validate(registered_claims_schema, true, payload, 'payload'); -} + /*------------------------------------------------------------------------*/ -const options_to_payload = { - 'audience': 'aud', - 'issuer': 'iss', - 'subject': 'sub', - 'jwtid': 'jti' -}; + /** + * Converts `string` to [camel case](https://en.wikipedia.org/wiki/CamelCase). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the camel cased string. + * @example + * + * _.camelCase('Foo Bar'); + * // => 'fooBar' + * + * _.camelCase('--foo-bar--'); + * // => 'fooBar' + * + * _.camelCase('__FOO_BAR__'); + * // => 'fooBar' + */ + var camelCase = createCompounder(function(result, word, index) { + word = word.toLowerCase(); + return result + (index ? capitalize(word) : word); + }); -const options_for_objects = [ - 'expiresIn', - 'notBefore', - 'noTimestamp', - 'audience', - 'issuer', - 'subject', - 'jwtid', -]; + /** + * Converts the first character of `string` to upper case and the remaining + * to lower case. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to capitalize. + * @returns {string} Returns the capitalized string. 
+ * @example + * + * _.capitalize('FRED'); + * // => 'Fred' + */ + function capitalize(string) { + return upperFirst(toString(string).toLowerCase()); + } -module.exports = function (payload, secretOrPrivateKey, options, callback) { - if (typeof options === 'function') { - callback = options; - options = {}; - } else { - options = options || {}; - } + /** + * Deburrs `string` by converting + * [Latin-1 Supplement](https://en.wikipedia.org/wiki/Latin-1_Supplement_(Unicode_block)#Character_table) + * and [Latin Extended-A](https://en.wikipedia.org/wiki/Latin_Extended-A) + * letters to basic Latin letters and removing + * [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to deburr. + * @returns {string} Returns the deburred string. + * @example + * + * _.deburr('déjà vu'); + * // => 'deja vu' + */ + function deburr(string) { + string = toString(string); + return string && string.replace(reLatin, deburrLetter).replace(reComboMark, ''); + } - const isObjectPayload = typeof payload === 'object' && - !Buffer.isBuffer(payload); + /** + * Checks if `string` ends with the given target string. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to inspect. + * @param {string} [target] The string to search for. + * @param {number} [position=string.length] The position to search up to. + * @returns {boolean} Returns `true` if `string` ends with `target`, + * else `false`. + * @example + * + * _.endsWith('abc', 'c'); + * // => true + * + * _.endsWith('abc', 'b'); + * // => false + * + * _.endsWith('abc', 'b', 2); + * // => true + */ + function endsWith(string, target, position) { + string = toString(string); + target = baseToString(target); - const header = Object.assign({ - alg: options.algorithm || 'HS256', - typ: isObjectPayload ? 'JWT' : undefined, - kid: options.keyid - }, options.header); + var length = string.length; + position = position === undefined + ? length + : baseClamp(toInteger(position), 0, length); - function failure(err) { - if (callback) { - return callback(err); + var end = position; + position -= target.length; + return position >= 0 && string.slice(position, end) == target; } - throw err; - } - - if (!secretOrPrivateKey && options.algorithm !== 'none') { - return failure(new Error('secretOrPrivateKey must have a value')); - } - if (secretOrPrivateKey != null && !(secretOrPrivateKey instanceof KeyObject)) { - try { - secretOrPrivateKey = createPrivateKey(secretOrPrivateKey) - } catch (_) { - try { - secretOrPrivateKey = createSecretKey(typeof secretOrPrivateKey === 'string' ? Buffer.from(secretOrPrivateKey) : secretOrPrivateKey) - } catch (_) { - return failure(new Error('secretOrPrivateKey is not valid key material')); - } + /** + * Converts the characters "&", "<", ">", '"', and "'" in `string` to their + * corresponding HTML entities. + * + * **Note:** No other characters are escaped. To escape additional + * characters use a third-party library like [_he_](https://mths.be/he). + * + * Though the ">" character is escaped for symmetry, characters like + * ">" and "/" don't need escaping in HTML and have no special meaning + * unless they're part of a tag or unquoted attribute value. See + * [Mathias Bynens's article](https://mathiasbynens.be/notes/ambiguous-ampersands) + * (under "semi-related fun fact") for more details. 
+ * + * When working with HTML you should always + * [quote attribute values](http://wonko.com/post/html-escaping) to reduce + * XSS vectors. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category String + * @param {string} [string=''] The string to escape. + * @returns {string} Returns the escaped string. + * @example + * + * _.escape('fred, barney, & pebbles'); + * // => 'fred, barney, & pebbles' + */ + function escape(string) { + string = toString(string); + return (string && reHasUnescapedHtml.test(string)) + ? string.replace(reUnescapedHtml, escapeHtmlChar) + : string; } - } - if (header.alg.startsWith('HS') && secretOrPrivateKey.type !== 'secret') { - return failure(new Error((`secretOrPrivateKey must be a symmetric key when using ${header.alg}`))) - } else if (/^(?:RS|PS|ES)/.test(header.alg)) { - if (secretOrPrivateKey.type !== 'private') { - return failure(new Error((`secretOrPrivateKey must be an asymmetric key when using ${header.alg}`))) - } - if (!options.allowInsecureKeySizes && - !header.alg.startsWith('ES') && - secretOrPrivateKey.asymmetricKeyDetails !== undefined && //KeyObject.asymmetricKeyDetails is supported in Node 15+ - secretOrPrivateKey.asymmetricKeyDetails.modulusLength < 2048) { - return failure(new Error(`secretOrPrivateKey has a minimum key size of 2048 bits for ${header.alg}`)); + /** + * Escapes the `RegExp` special characters "^", "$", "\", ".", "*", "+", + * "?", "(", ")", "[", "]", "{", "}", and "|" in `string`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to escape. + * @returns {string} Returns the escaped string. + * @example + * + * _.escapeRegExp('[lodash](https://lodash.com/)'); + * // => '\[lodash\]\(https://lodash\.com/\)' + */ + function escapeRegExp(string) { + string = toString(string); + return (string && reHasRegExpChar.test(string)) + ? string.replace(reRegExpChar, '\\$&') + : string; } - } - if (typeof payload === 'undefined') { - return failure(new Error('payload is required')); - } else if (isObjectPayload) { - try { - validatePayload(payload); - } - catch (error) { - return failure(error); - } - if (!options.mutatePayload) { - payload = Object.assign({},payload); - } - } else { - const invalid_options = options_for_objects.filter(function (opt) { - return typeof options[opt] !== 'undefined'; + /** + * Converts `string` to + * [kebab case](https://en.wikipedia.org/wiki/Letter_case#Special_case_styles). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the kebab cased string. + * @example + * + * _.kebabCase('Foo Bar'); + * // => 'foo-bar' + * + * _.kebabCase('fooBar'); + * // => 'foo-bar' + * + * _.kebabCase('__FOO_BAR__'); + * // => 'foo-bar' + */ + var kebabCase = createCompounder(function(result, word, index) { + return result + (index ? '-' : '') + word.toLowerCase(); }); - if (invalid_options.length > 0) { - return failure(new Error('invalid ' + invalid_options.join(',') + ' option for ' + (typeof payload ) + ' payload')); - } - } - - if (typeof payload.exp !== 'undefined' && typeof options.expiresIn !== 'undefined') { - return failure(new Error('Bad "options.expiresIn" option the payload already has an "exp" property.')); - } + /** + * Converts `string`, as space separated words, to lower case. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to convert. 
+ * @returns {string} Returns the lower cased string. + * @example + * + * _.lowerCase('--Foo-Bar--'); + * // => 'foo bar' + * + * _.lowerCase('fooBar'); + * // => 'foo bar' + * + * _.lowerCase('__FOO_BAR__'); + * // => 'foo bar' + */ + var lowerCase = createCompounder(function(result, word, index) { + return result + (index ? ' ' : '') + word.toLowerCase(); + }); - if (typeof payload.nbf !== 'undefined' && typeof options.notBefore !== 'undefined') { - return failure(new Error('Bad "options.notBefore" option the payload already has an "nbf" property.')); - } + /** + * Converts the first character of `string` to lower case. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.lowerFirst('Fred'); + * // => 'fred' + * + * _.lowerFirst('FRED'); + * // => 'fRED' + */ + var lowerFirst = createCaseFirst('toLowerCase'); - try { - validateOptions(options); - } - catch (error) { - return failure(error); - } + /** + * Pads `string` on the left and right sides if it's shorter than `length`. + * Padding characters are truncated if they can't be evenly divided by `length`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.pad('abc', 8); + * // => ' abc ' + * + * _.pad('abc', 8, '_-'); + * // => '_-abc_-_' + * + * _.pad('abc', 3); + * // => 'abc' + */ + function pad(string, length, chars) { + string = toString(string); + length = toInteger(length); - if (!options.allowInvalidAsymmetricKeyTypes) { - try { - validateAsymmetricKey(header.alg, secretOrPrivateKey); - } catch (error) { - return failure(error); + var strLength = length ? stringSize(string) : 0; + if (!length || strLength >= length) { + return string; + } + var mid = (length - strLength) / 2; + return ( + createPadding(nativeFloor(mid), chars) + + string + + createPadding(nativeCeil(mid), chars) + ); } - } - - const timestamp = payload.iat || Math.floor(Date.now() / 1000); - if (options.noTimestamp) { - delete payload.iat; - } else if (isObjectPayload) { - payload.iat = timestamp; - } + /** + * Pads `string` on the right side if it's shorter than `length`. Padding + * characters are truncated if they exceed `length`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.padEnd('abc', 6); + * // => 'abc ' + * + * _.padEnd('abc', 6, '_-'); + * // => 'abc_-_' + * + * _.padEnd('abc', 3); + * // => 'abc' + */ + function padEnd(string, length, chars) { + string = toString(string); + length = toInteger(length); - if (typeof options.notBefore !== 'undefined') { - try { - payload.nbf = timespan(options.notBefore, timestamp); - } - catch (err) { - return failure(err); - } - if (typeof payload.nbf === 'undefined') { - return failure(new Error('"notBefore" should be a number of seconds or string representing a timespan eg: "1d", "20h", 60')); + var strLength = length ? stringSize(string) : 0; + return (length && strLength < length) + ? 
(string + createPadding(length - strLength, chars)) + : string; } - } - if (typeof options.expiresIn !== 'undefined' && typeof payload === 'object') { - try { - payload.exp = timespan(options.expiresIn, timestamp); - } - catch (err) { - return failure(err); + /** + * Pads `string` on the left side if it's shorter than `length`. Padding + * characters are truncated if they exceed `length`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.padStart('abc', 6); + * // => ' abc' + * + * _.padStart('abc', 6, '_-'); + * // => '_-_abc' + * + * _.padStart('abc', 3); + * // => 'abc' + */ + function padStart(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? stringSize(string) : 0; + return (length && strLength < length) + ? (createPadding(length - strLength, chars) + string) + : string; } - if (typeof payload.exp === 'undefined') { - return failure(new Error('"expiresIn" should be a number of seconds or string representing a timespan eg: "1d", "20h", 60')); + + /** + * Converts `string` to an integer of the specified radix. If `radix` is + * `undefined` or `0`, a `radix` of `10` is used unless `value` is a + * hexadecimal, in which case a `radix` of `16` is used. + * + * **Note:** This method aligns with the + * [ES5 implementation](https://es5.github.io/#x15.1.2.2) of `parseInt`. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category String + * @param {string} string The string to convert. + * @param {number} [radix=10] The radix to interpret `value` by. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {number} Returns the converted integer. + * @example + * + * _.parseInt('08'); + * // => 8 + * + * _.map(['6', '08', '10'], _.parseInt); + * // => [6, 8, 10] + */ + function parseInt(string, radix, guard) { + if (guard || radix == null) { + radix = 0; + } else if (radix) { + radix = +radix; + } + return nativeParseInt(toString(string).replace(reTrimStart, ''), radix || 0); } - } - Object.keys(options_to_payload).forEach(function (key) { - const claim = options_to_payload[key]; - if (typeof options[key] !== 'undefined') { - if (typeof payload[claim] !== 'undefined') { - return failure(new Error('Bad "options.' + key + '" option. The payload already has an "' + claim + '" property.')); + /** + * Repeats the given string `n` times. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to repeat. + * @param {number} [n=1] The number of times to repeat the string. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {string} Returns the repeated string. + * @example + * + * _.repeat('*', 3); + * // => '***' + * + * _.repeat('abc', 2); + * // => 'abcabc' + * + * _.repeat('abc', 0); + * // => '' + */ + function repeat(string, n, guard) { + if ((guard ? isIterateeCall(string, n, guard) : n === undefined)) { + n = 1; + } else { + n = toInteger(n); } - payload[claim] = options[key]; + return baseRepeat(toString(string), n); } - }); - - const encoding = options.encoding || 'utf8'; - if (typeof callback === 'function') { - callback = callback && once(callback); + /** + * Replaces matches for `pattern` in `string` with `replacement`. 
+ * + * **Note:** This method is based on + * [`String#replace`](https://mdn.io/String/replace). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to modify. + * @param {RegExp|string} pattern The pattern to replace. + * @param {Function|string} replacement The match replacement. + * @returns {string} Returns the modified string. + * @example + * + * _.replace('Hi Fred', 'Fred', 'Barney'); + * // => 'Hi Barney' + */ + function replace() { + var args = arguments, + string = toString(args[0]); - jws.createSign({ - header: header, - privateKey: secretOrPrivateKey, - payload: payload, - encoding: encoding - }).once('error', callback) - .once('done', function (signature) { - // TODO: Remove in favor of the modulus length check before signing once node 15+ is the minimum supported version - if(!options.allowInsecureKeySizes && /^(?:RS|PS)/.test(header.alg) && signature.length < 256) { - return callback(new Error(`secretOrPrivateKey has a minimum key size of 2048 bits for ${header.alg}`)) - } - callback(null, signature); - }); - } else { - let signature = jws.sign({header: header, payload: payload, secret: secretOrPrivateKey, encoding: encoding}); - // TODO: Remove in favor of the modulus length check before signing once node 15+ is the minimum supported version - if(!options.allowInsecureKeySizes && /^(?:RS|PS)/.test(header.alg) && signature.length < 256) { - throw new Error(`secretOrPrivateKey has a minimum key size of 2048 bits for ${header.alg}`) + return args.length < 3 ? string : string.replace(args[1], args[2]); } - return signature - } -}; - - -/***/ }), - -/***/ 12327: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const JsonWebTokenError = __nccwpck_require__(405); -const NotBeforeError = __nccwpck_require__(4383); -const TokenExpiredError = __nccwpck_require__(46637); -const decode = __nccwpck_require__(53359); -const timespan = __nccwpck_require__(20910); -const validateAsymmetricKey = __nccwpck_require__(47596); -const PS_SUPPORTED = __nccwpck_require__(59085); -const jws = __nccwpck_require__(22597); -const {KeyObject, createSecretKey, createPublicKey} = __nccwpck_require__(6113); - -const PUB_KEY_ALGS = ['RS256', 'RS384', 'RS512']; -const EC_KEY_ALGS = ['ES256', 'ES384', 'ES512']; -const RSA_KEY_ALGS = ['RS256', 'RS384', 'RS512']; -const HS_ALGS = ['HS256', 'HS384', 'HS512']; - -if (PS_SUPPORTED) { - PUB_KEY_ALGS.splice(PUB_KEY_ALGS.length, 0, 'PS256', 'PS384', 'PS512'); - RSA_KEY_ALGS.splice(RSA_KEY_ALGS.length, 0, 'PS256', 'PS384', 'PS512'); -} - -module.exports = function (jwtString, secretOrPublicKey, options, callback) { - if ((typeof options === 'function') && !callback) { - callback = options; - options = {}; - } - if (!options) { - options = {}; - } + /** + * Converts `string` to + * [snake case](https://en.wikipedia.org/wiki/Snake_case). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the snake cased string. + * @example + * + * _.snakeCase('Foo Bar'); + * // => 'foo_bar' + * + * _.snakeCase('fooBar'); + * // => 'foo_bar' + * + * _.snakeCase('--FOO-BAR--'); + * // => 'foo_bar' + */ + var snakeCase = createCompounder(function(result, word, index) { + return result + (index ? '_' : '') + word.toLowerCase(); + }); - //clone this object since we are going to mutate it. - options = Object.assign({}, options); + /** + * Splits `string` by `separator`. 
+ * + * **Note:** This method is based on + * [`String#split`](https://mdn.io/String/split). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to split. + * @param {RegExp|string} separator The separator pattern to split by. + * @param {number} [limit] The length to truncate results to. + * @returns {Array} Returns the string segments. + * @example + * + * _.split('a-b-c', '-', 2); + * // => ['a', 'b'] + */ + function split(string, separator, limit) { + if (limit && typeof limit != 'number' && isIterateeCall(string, separator, limit)) { + separator = limit = undefined; + } + limit = limit === undefined ? MAX_ARRAY_LENGTH : limit >>> 0; + if (!limit) { + return []; + } + string = toString(string); + if (string && ( + typeof separator == 'string' || + (separator != null && !isRegExp(separator)) + )) { + separator = baseToString(separator); + if (!separator && hasUnicode(string)) { + return castSlice(stringToArray(string), 0, limit); + } + } + return string.split(separator, limit); + } - let done; + /** + * Converts `string` to + * [start case](https://en.wikipedia.org/wiki/Letter_case#Stylistic_or_specialised_usage). + * + * @static + * @memberOf _ + * @since 3.1.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the start cased string. + * @example + * + * _.startCase('--foo-bar--'); + * // => 'Foo Bar' + * + * _.startCase('fooBar'); + * // => 'Foo Bar' + * + * _.startCase('__FOO_BAR__'); + * // => 'FOO BAR' + */ + var startCase = createCompounder(function(result, word, index) { + return result + (index ? ' ' : '') + upperFirst(word); + }); - if (callback) { - done = callback; - } else { - done = function(err, data) { - if (err) throw err; - return data; - }; - } + /** + * Checks if `string` starts with the given target string. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to inspect. + * @param {string} [target] The string to search for. + * @param {number} [position=0] The position to search from. + * @returns {boolean} Returns `true` if `string` starts with `target`, + * else `false`. + * @example + * + * _.startsWith('abc', 'a'); + * // => true + * + * _.startsWith('abc', 'b'); + * // => false + * + * _.startsWith('abc', 'b', 1); + * // => true + */ + function startsWith(string, target, position) { + string = toString(string); + position = position == null + ? 0 + : baseClamp(toInteger(position), 0, string.length); - if (options.clockTimestamp && typeof options.clockTimestamp !== 'number') { - return done(new JsonWebTokenError('clockTimestamp must be a number')); - } + target = baseToString(target); + return string.slice(position, position + target.length) == target; + } - if (options.nonce !== undefined && (typeof options.nonce !== 'string' || options.nonce.trim() === '')) { - return done(new JsonWebTokenError('nonce must be a non-empty string')); - } + /** + * Creates a compiled template function that can interpolate data properties + * in "interpolate" delimiters, HTML-escape interpolated data properties in + * "escape" delimiters, and execute JavaScript in "evaluate" delimiters. Data + * properties may be accessed as free variables in the template. If a setting + * object is given, it takes precedence over `_.templateSettings` values. 
+ * + * **Note:** In the development build `_.template` utilizes + * [sourceURLs](http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl) + * for easier debugging. + * + * For more information on precompiling templates see + * [lodash's custom builds documentation](https://lodash.com/custom-builds). + * + * For more information on Chrome extension sandboxes see + * [Chrome's extensions documentation](https://developer.chrome.com/extensions/sandboxingEval). + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category String + * @param {string} [string=''] The template string. + * @param {Object} [options={}] The options object. + * @param {RegExp} [options.escape=_.templateSettings.escape] + * The HTML "escape" delimiter. + * @param {RegExp} [options.evaluate=_.templateSettings.evaluate] + * The "evaluate" delimiter. + * @param {Object} [options.imports=_.templateSettings.imports] + * An object to import into the template as free variables. + * @param {RegExp} [options.interpolate=_.templateSettings.interpolate] + * The "interpolate" delimiter. + * @param {string} [options.sourceURL='lodash.templateSources[n]'] + * The sourceURL of the compiled template. + * @param {string} [options.variable='obj'] + * The data object variable name. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the compiled template function. + * @example + * + * // Use the "interpolate" delimiter to create a compiled template. + * var compiled = _.template('hello <%= user %>!'); + * compiled({ 'user': 'fred' }); + * // => 'hello fred!' + * + * // Use the HTML "escape" delimiter to escape data property values. + * var compiled = _.template('<%- value %>'); + * compiled({ 'value': '"]; + int32 quotes = 3 [json_name = "unbalanced\"quotes"]; + int32 script_and_quotes = 4 + [json_name = "\""]; +} + +message TestExtensions { + .protobuf_unittest.TestAllExtensions extensions = 1; +} + +message TestEnumValue { + EnumType enum_value1 = 1; + EnumType enum_value2 = 2; + EnumType enum_value3 = 3; +} + +message MapsTestCases { + EmptyMap empty_map = 1; + StringtoInt string_to_int = 2; + IntToString int_to_string = 3; + Mixed1 mixed1 = 4; + Mixed2 mixed2 = 5; + MapOfObjects map_of_objects = 6; + + // Empty key tests + StringtoInt empty_key_string_to_int1 = 7; + StringtoInt empty_key_string_to_int2 = 8; + StringtoInt empty_key_string_to_int3 = 9; + BoolToString empty_key_bool_to_string = 10; + IntToString empty_key_int_to_string = 11; + Mixed1 empty_key_mixed = 12; + MapOfObjects empty_key_map_objects = 13; +} + +message EmptyMap { + map map = 1; +} + +message StringtoInt { + map map = 1; +} + +message IntToString { + map map = 1; +} + +message BoolToString { + map map = 1; +} + +message Mixed1 { + string msg = 1; + map map = 2; +} + +message Mixed2 { + enum E { + E0 = 0; + E1 = 1; + E2 = 2; + E3 = 3; + } + map map = 1; + E ee = 2; +} + +message MapOfObjects { + message M { + string inner_text = 1; + } + map map = 1; +} + +message MapIn { + string other = 1; + repeated string things = 2; + map map_input = 3; + map map_any = 4; +} + +message MapOut { + map map1 = 1; + map map2 = 2; + map map3 = 3; + map map4 = 5; + string bar = 4; +} + +// A message with exactly the same wire representation as MapOut, but using +// repeated message fields instead of map fields. We use this message to test +// the wire-format compatibility of the JSON transcoder (e.g., whether it +// handles missing keys correctly). 
+message MapOutWireFormat { + message Map1Entry { + string key = 1; + MapM value = 2; + } + repeated Map1Entry map1 = 1; + message Map2Entry { + string key = 1; + MapOut value = 2; + } + repeated Map2Entry map2 = 2; + message Map3Entry { + int32 key = 1; + string value = 2; + } + repeated Map3Entry map3 = 3; + message Map4Entry { + bool key = 1; + string value = 2; + } + repeated Map4Entry map4 = 5; + string bar = 4; +} + +message MapM { + string foo = 1; +} diff --git a/dist/protos/google/protobuf/wrappers.proto b/dist/protos/google/protobuf/wrappers.proto new file mode 100644 index 0000000..1959fa5 --- /dev/null +++ b/dist/protos/google/protobuf/wrappers.proto @@ -0,0 +1,123 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Wrappers for primitive (non-message) types. These types are useful +// for embedding primitives in the `google.protobuf.Any` type and for places +// where we need to distinguish between the absence of a primitive +// typed field and its default value. +// +// These wrappers have no meaningful use within repeated fields as they lack +// the ability to detect presence on individual elements. +// These wrappers have no meaningful use within a map or a oneof since +// individual entries of a map or fields of a oneof can already detect presence. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/wrapperspb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "WrappersProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// Wrapper message for `double`. +// +// The JSON representation for `DoubleValue` is JSON number. +message DoubleValue { + // The double value. + double value = 1; +} + +// Wrapper message for `float`. 
+// +// The JSON representation for `FloatValue` is JSON number. +message FloatValue { + // The float value. + float value = 1; +} + +// Wrapper message for `int64`. +// +// The JSON representation for `Int64Value` is JSON string. +message Int64Value { + // The int64 value. + int64 value = 1; +} + +// Wrapper message for `uint64`. +// +// The JSON representation for `UInt64Value` is JSON string. +message UInt64Value { + // The uint64 value. + uint64 value = 1; +} + +// Wrapper message for `int32`. +// +// The JSON representation for `Int32Value` is JSON number. +message Int32Value { + // The int32 value. + int32 value = 1; +} + +// Wrapper message for `uint32`. +// +// The JSON representation for `UInt32Value` is JSON number. +message UInt32Value { + // The uint32 value. + uint32 value = 1; +} + +// Wrapper message for `bool`. +// +// The JSON representation for `BoolValue` is JSON `true` and `false`. +message BoolValue { + // The bool value. + bool value = 1; +} + +// Wrapper message for `string`. +// +// The JSON representation for `StringValue` is JSON string. +message StringValue { + // The string value. + string value = 1; +} + +// Wrapper message for `bytes`. +// +// The JSON representation for `BytesValue` is JSON string. +message BytesValue { + // The bytes value. + bytes value = 1; +} diff --git a/dist/protos/google/rpc/code.proto b/dist/protos/google/rpc/code.proto new file mode 100644 index 0000000..7c810af --- /dev/null +++ b/dist/protos/google/rpc/code.proto @@ -0,0 +1,186 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +option go_package = "google.golang.org/genproto/googleapis/rpc/code;code"; +option java_multiple_files = true; +option java_outer_classname = "CodeProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// The canonical error codes for gRPC APIs. +// +// +// Sometimes multiple error codes may apply. Services should return +// the most specific error code that applies. For example, prefer +// `OUT_OF_RANGE` over `FAILED_PRECONDITION` if both codes apply. +// Similarly prefer `NOT_FOUND` or `ALREADY_EXISTS` over `FAILED_PRECONDITION`. +enum Code { + // Not an error; returned on success. + // + // HTTP Mapping: 200 OK + OK = 0; + + // The operation was cancelled, typically by the caller. + // + // HTTP Mapping: 499 Client Closed Request + CANCELLED = 1; + + // Unknown error. For example, this error may be returned when + // a `Status` value received from another address space belongs to + // an error space that is not known in this address space. Also + // errors raised by APIs that do not return enough error information + // may be converted to this error. + // + // HTTP Mapping: 500 Internal Server Error + UNKNOWN = 2; + + // The client specified an invalid argument. Note that this differs + // from `FAILED_PRECONDITION`. 
`INVALID_ARGUMENT` indicates arguments + // that are problematic regardless of the state of the system + // (e.g., a malformed file name). + // + // HTTP Mapping: 400 Bad Request + INVALID_ARGUMENT = 3; + + // The deadline expired before the operation could complete. For operations + // that change the state of the system, this error may be returned + // even if the operation has completed successfully. For example, a + // successful response from a server could have been delayed long + // enough for the deadline to expire. + // + // HTTP Mapping: 504 Gateway Timeout + DEADLINE_EXCEEDED = 4; + + // Some requested entity (e.g., file or directory) was not found. + // + // Note to server developers: if a request is denied for an entire class + // of users, such as gradual feature rollout or undocumented allowlist, + // `NOT_FOUND` may be used. If a request is denied for some users within + // a class of users, such as user-based access control, `PERMISSION_DENIED` + // must be used. + // + // HTTP Mapping: 404 Not Found + NOT_FOUND = 5; + + // The entity that a client attempted to create (e.g., file or directory) + // already exists. + // + // HTTP Mapping: 409 Conflict + ALREADY_EXISTS = 6; + + // The caller does not have permission to execute the specified + // operation. `PERMISSION_DENIED` must not be used for rejections + // caused by exhausting some resource (use `RESOURCE_EXHAUSTED` + // instead for those errors). `PERMISSION_DENIED` must not be + // used if the caller can not be identified (use `UNAUTHENTICATED` + // instead for those errors). This error code does not imply the + // request is valid or the requested entity exists or satisfies + // other pre-conditions. + // + // HTTP Mapping: 403 Forbidden + PERMISSION_DENIED = 7; + + // The request does not have valid authentication credentials for the + // operation. + // + // HTTP Mapping: 401 Unauthorized + UNAUTHENTICATED = 16; + + // Some resource has been exhausted, perhaps a per-user quota, or + // perhaps the entire file system is out of space. + // + // HTTP Mapping: 429 Too Many Requests + RESOURCE_EXHAUSTED = 8; + + // The operation was rejected because the system is not in a state + // required for the operation's execution. For example, the directory + // to be deleted is non-empty, an rmdir operation is applied to + // a non-directory, etc. + // + // Service implementors can use the following guidelines to decide + // between `FAILED_PRECONDITION`, `ABORTED`, and `UNAVAILABLE`: + // (a) Use `UNAVAILABLE` if the client can retry just the failing call. + // (b) Use `ABORTED` if the client should retry at a higher level. For + // example, when a client-specified test-and-set fails, indicating the + // client should restart a read-modify-write sequence. + // (c) Use `FAILED_PRECONDITION` if the client should not retry until + // the system state has been explicitly fixed. For example, if an "rmdir" + // fails because the directory is non-empty, `FAILED_PRECONDITION` + // should be returned since the client should not retry unless + // the files are deleted from the directory. + // + // HTTP Mapping: 400 Bad Request + FAILED_PRECONDITION = 9; + + // The operation was aborted, typically due to a concurrency issue such as + // a sequencer check failure or transaction abort. + // + // See the guidelines above for deciding between `FAILED_PRECONDITION`, + // `ABORTED`, and `UNAVAILABLE`. + // + // HTTP Mapping: 409 Conflict + ABORTED = 10; + + // The operation was attempted past the valid range. 
E.g., seeking or + // reading past end-of-file. + // + // Unlike `INVALID_ARGUMENT`, this error indicates a problem that may + // be fixed if the system state changes. For example, a 32-bit file + // system will generate `INVALID_ARGUMENT` if asked to read at an + // offset that is not in the range [0,2^32-1], but it will generate + // `OUT_OF_RANGE` if asked to read from an offset past the current + // file size. + // + // There is a fair bit of overlap between `FAILED_PRECONDITION` and + // `OUT_OF_RANGE`. We recommend using `OUT_OF_RANGE` (the more specific + // error) when it applies so that callers who are iterating through + // a space can easily look for an `OUT_OF_RANGE` error to detect when + // they are done. + // + // HTTP Mapping: 400 Bad Request + OUT_OF_RANGE = 11; + + // The operation is not implemented or is not supported/enabled in this + // service. + // + // HTTP Mapping: 501 Not Implemented + UNIMPLEMENTED = 12; + + // Internal errors. This means that some invariants expected by the + // underlying system have been broken. This error code is reserved + // for serious errors. + // + // HTTP Mapping: 500 Internal Server Error + INTERNAL = 13; + + // The service is currently unavailable. This is most likely a + // transient condition, which can be corrected by retrying with + // a backoff. Note that it is not always safe to retry + // non-idempotent operations. + // + // See the guidelines above for deciding between `FAILED_PRECONDITION`, + // `ABORTED`, and `UNAVAILABLE`. + // + // HTTP Mapping: 503 Service Unavailable + UNAVAILABLE = 14; + + // Unrecoverable data loss or corruption. + // + // HTTP Mapping: 500 Internal Server Error + DATA_LOSS = 15; +} diff --git a/dist/protos/google/rpc/context/attribute_context.proto b/dist/protos/google/rpc/context/attribute_context.proto new file mode 100644 index 0000000..ef9242e --- /dev/null +++ b/dist/protos/google/rpc/context/attribute_context.proto @@ -0,0 +1,344 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc.context; + +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/rpc/context/attribute_context;attribute_context"; +option java_multiple_files = true; +option java_outer_classname = "AttributeContextProto"; +option java_package = "com.google.rpc.context"; + +// This message defines the standard attribute vocabulary for Google APIs. +// +// An attribute is a piece of metadata that describes an activity on a network +// service. For example, the size of an HTTP request, or the status code of +// an HTTP response. +// +// Each attribute has a type and a name, which is logically defined as +// a proto message field in `AttributeContext`. The field type becomes the +// attribute type, and the field path becomes the attribute name. 
For example, +// the attribute `source.ip` maps to field `AttributeContext.source.ip`. +// +// This message definition is guaranteed not to have any wire breaking change. +// So you can use it directly for passing attributes across different systems. +// +// NOTE: Different system may generate different subset of attributes. Please +// verify the system specification before relying on an attribute generated +// a system. +message AttributeContext { + // This message defines attributes for a node that handles a network request. + // The node can be either a service or an application that sends, forwards, + // or receives the request. Service peers should fill in + // `principal` and `labels` as appropriate. + message Peer { + // The IP address of the peer. + string ip = 1; + + // The network port of the peer. + int64 port = 2; + + // The labels associated with the peer. + map labels = 6; + + // The identity of this peer. Similar to `Request.auth.principal`, but + // relative to the peer instead of the request. For example, the + // identity associated with a load balancer that forwarded the request. + string principal = 7; + + // The CLDR country/region code associated with the above IP address. + // If the IP address is private, the `region_code` should reflect the + // physical location where this peer is running. + string region_code = 8; + } + + // This message defines attributes associated with API operations, such as + // a network API request. The terminology is based on the conventions used + // by Google APIs, Istio, and OpenAPI. + message Api { + // The API service name. It is a logical identifier for a networked API, + // such as "pubsub.googleapis.com". The naming syntax depends on the + // API management system being used for handling the request. + string service = 1; + + // The API operation name. For gRPC requests, it is the fully qualified API + // method name, such as "google.pubsub.v1.Publisher.Publish". For OpenAPI + // requests, it is the `operationId`, such as "getPet". + string operation = 2; + + // The API protocol used for sending the request, such as "http", "https", + // "grpc", or "internal". + string protocol = 3; + + // The API version associated with the API operation above, such as "v1" or + // "v1alpha1". + string version = 4; + } + + // This message defines request authentication attributes. Terminology is + // based on the JSON Web Token (JWT) standard, but the terms also + // correlate to concepts in other standards. + message Auth { + // The authenticated principal. Reflects the issuer (`iss`) and subject + // (`sub`) claims within a JWT. The issuer and subject should be `/` + // delimited, with `/` percent-encoded within the subject fragment. For + // Google accounts, the principal format is: + // "https://accounts.google.com/{id}" + string principal = 1; + + // The intended audience(s) for this authentication information. Reflects + // the audience (`aud`) claim within a JWT. The audience + // value(s) depends on the `issuer`, but typically include one or more of + // the following pieces of information: + // + // * The services intended to receive the credential. For example, + // ["https://pubsub.googleapis.com/", "https://storage.googleapis.com/"]. + // * A set of service-based scopes. For example, + // ["https://www.googleapis.com/auth/cloud-platform"]. + // * The client id of an app, such as the Firebase project id for JWTs + // from Firebase Auth. 
+ // + // Consult the documentation for the credential issuer to determine the + // information provided. + repeated string audiences = 2; + + // The authorized presenter of the credential. Reflects the optional + // Authorized Presenter (`azp`) claim within a JWT or the + // OAuth client id. For example, a Google Cloud Platform client id looks + // as follows: "123456789012.apps.googleusercontent.com". + string presenter = 3; + + // Structured claims presented with the credential. JWTs include + // `{key: value}` pairs for standard and private claims. The following + // is a subset of the standard required and optional claims that would + // typically be presented for a Google-based JWT: + // + // {'iss': 'accounts.google.com', + // 'sub': '113289723416554971153', + // 'aud': ['123456789012', 'pubsub.googleapis.com'], + // 'azp': '123456789012.apps.googleusercontent.com', + // 'email': 'jsmith@example.com', + // 'iat': 1353601026, + // 'exp': 1353604926} + // + // SAML assertions are similarly specified, but with an identity provider + // dependent structure. + google.protobuf.Struct claims = 4; + + // A list of access level resource names that allow resources to be + // accessed by authenticated requester. It is part of Secure GCP processing + // for the incoming request. An access level string has the format: + // "//{api_service_name}/accessPolicies/{policy_id}/accessLevels/{short_name}" + // + // Example: + // "//accesscontextmanager.googleapis.com/accessPolicies/MY_POLICY_ID/accessLevels/MY_LEVEL" + repeated string access_levels = 5; + } + + // This message defines attributes for an HTTP request. If the actual + // request is not an HTTP request, the runtime system should try to map + // the actual request to an equivalent HTTP request. + message Request { + // The unique ID for a request, which can be propagated to downstream + // systems. The ID should have low probability of collision + // within a single day for a specific service. + string id = 1; + + // The HTTP request method, such as `GET`, `POST`. + string method = 2; + + // The HTTP request headers. If multiple headers share the same key, they + // must be merged according to the HTTP spec. All header keys must be + // lowercased, because HTTP header keys are case-insensitive. + map headers = 3; + + // The HTTP URL path, excluding the query parameters. + string path = 4; + + // The HTTP request `Host` header value. + string host = 5; + + // The HTTP URL scheme, such as `http` and `https`. + string scheme = 6; + + // The HTTP URL query in the format of `name1=value1&name2=value2`, as it + // appears in the first line of the HTTP request. No decoding is performed. + string query = 7; + + // The timestamp when the `destination` service receives the last byte of + // the request. + google.protobuf.Timestamp time = 9; + + // The HTTP request size in bytes. If unknown, it must be -1. + int64 size = 10; + + // The network protocol used with the request, such as "http/1.1", + // "spdy/3", "h2", "h2c", "webrtc", "tcp", "udp", "quic". See + // https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + // for details. + string protocol = 11; + + // A special parameter for request reason. It is used by security systems + // to associate auditing information with a request. + string reason = 12; + + // The request authentication. May be absent for unauthenticated requests. + // Derived from the HTTP request `Authorization` header or equivalent. 
+    Auth auth = 13;
+  }
+
+  // This message defines attributes for a typical network response. It
+  // generally models semantics of an HTTP response.
+  message Response {
+    // The HTTP response status code, such as `200` and `404`.
+    int64 code = 1;
+
+    // The HTTP response size in bytes. If unknown, it must be -1.
+    int64 size = 2;
+
+    // The HTTP response headers. If multiple headers share the same key, they
+    // must be merged according to HTTP spec. All header keys must be
+    // lowercased, because HTTP header keys are case-insensitive.
+    map<string, string> headers = 3;
+
+    // The timestamp when the `destination` service sends the last byte of
+    // the response.
+    google.protobuf.Timestamp time = 4;
+
+    // The amount of time it takes the backend service to fully respond to a
+    // request. Measured from when the destination service starts to send the
+    // request to the backend until when the destination service receives the
+    // complete response from the backend.
+    google.protobuf.Duration backend_latency = 5;
+  }
+
+  // This message defines core attributes for a resource. A resource is an
+  // addressable (named) entity provided by the destination service. For
+  // example, a file stored on a network storage service.
+  message Resource {
+    // The name of the service that this resource belongs to, such as
+    // `pubsub.googleapis.com`. The service may be different from the DNS
+    // hostname that actually serves the request.
+    string service = 1;
+
+    // The stable identifier (name) of a resource on the `service`. A resource
+    // can be logically identified as "//{resource.service}/{resource.name}".
+    // The differences between a resource name and a URI are:
+    //
+    // * Resource name is a logical identifier, independent of network
+    //   protocol and API version. For example,
+    //   `//pubsub.googleapis.com/projects/123/topics/news-feed`.
+    // * URI often includes protocol and version information, so it can
+    //   be used directly by applications. For example,
+    //   `https://pubsub.googleapis.com/v1/projects/123/topics/news-feed`.
+    //
+    // See https://cloud.google.com/apis/design/resource_names for details.
+    string name = 2;
+
+    // The type of the resource. The syntax is platform-specific because
+    // different platforms define their resources differently.
+    //
+    // For Google APIs, the type format must be "{service}/{kind}", such as
+    // "pubsub.googleapis.com/Topic".
+    string type = 3;
+
+    // The labels or tags on the resource, such as AWS resource tags and
+    // Kubernetes resource labels.
+    map<string, string> labels = 4;
+
+    // The unique identifier of the resource. UID is unique in the time
+    // and space for this resource within the scope of the service. It is
+    // typically generated by the server on successful creation of a resource
+    // and must not be changed. UID is used to uniquely identify resources
+    // with resource name reuses. This should be a UUID4.
+    string uid = 5;
+
+    // Annotations is an unstructured key-value map stored with a resource that
+    // may be set by external tools to store and retrieve arbitrary metadata.
+    // They are not queryable and should be preserved when modifying objects.
+    //
+    // More info: https://kubernetes.io/docs/user-guide/annotations
+    map<string, string> annotations = 6;
+
+    // Mutable. The display name set by clients. Must be <= 63 characters.
+    string display_name = 7;
+
+    // Output only. The timestamp when the resource was created. This may
+    // be either the time creation was initiated or when it was completed.
+    google.protobuf.Timestamp create_time = 8;
+
+    // Output only.
The timestamp when the resource was last updated. Any + // change to the resource made by users must refresh this value. + // Changes to a resource made by the service should refresh this value. + google.protobuf.Timestamp update_time = 9; + + // Output only. The timestamp when the resource was deleted. + // If the resource is not deleted, this must be empty. + google.protobuf.Timestamp delete_time = 10; + + // Output only. An opaque value that uniquely identifies a version or + // generation of a resource. It can be used to confirm that the client + // and server agree on the ordering of a resource being written. + string etag = 11; + + // Immutable. The location of the resource. The location encoding is + // specific to the service provider, and new encoding may be introduced + // as the service evolves. + // + // For Google Cloud products, the encoding is what is used by Google Cloud + // APIs, such as `us-east1`, `aws-us-east-1`, and `azure-eastus2`. The + // semantics of `location` is identical to the + // `cloud.googleapis.com/location` label used by some Google Cloud APIs. + string location = 12; + } + + // The origin of a network activity. In a multi hop network activity, + // the origin represents the sender of the first hop. For the first hop, + // the `source` and the `origin` must have the same content. + Peer origin = 7; + + // The source of a network activity, such as starting a TCP connection. + // In a multi hop network activity, the source represents the sender of the + // last hop. + Peer source = 1; + + // The destination of a network activity, such as accepting a TCP connection. + // In a multi hop network activity, the destination represents the receiver of + // the last hop. + Peer destination = 2; + + // Represents a network request, such as an HTTP request. + Request request = 3; + + // Represents a network response, such as an HTTP response. + Response response = 4; + + // Represents a target resource that is involved with a network activity. + // If multiple resources are involved with an activity, this must be the + // primary one. + Resource resource = 5; + + // Represents an API operation that is involved to a network activity. + Api api = 6; + + // Supports extensions for advanced use cases, such as logs and metrics. + repeated google.protobuf.Any extensions = 8; +} diff --git a/dist/protos/google/rpc/context/audit_context.proto b/dist/protos/google/rpc/context/audit_context.proto new file mode 100644 index 0000000..7b8b705 --- /dev/null +++ b/dist/protos/google/rpc/context/audit_context.proto @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.rpc.context; + +import "google/protobuf/struct.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/rpc/context;context"; +option java_multiple_files = true; +option java_outer_classname = "AuditContextProto"; +option java_package = "com.google.rpc.context"; + +// `AuditContext` provides information that is needed for audit logging. +message AuditContext { + // Serialized audit log. + bytes audit_log = 1; + + // An API request message that is scrubbed based on the method annotation. + // This field should only be filled if audit_log field is present. + // Service Control will use this to assemble a complete log for Cloud Audit + // Logs and Google internal audit logs. + google.protobuf.Struct scrubbed_request = 2; + + // An API response message that is scrubbed based on the method annotation. + // This field should only be filled if audit_log field is present. + // Service Control will use this to assemble a complete log for Cloud Audit + // Logs and Google internal audit logs. + google.protobuf.Struct scrubbed_response = 3; + + // Number of scrubbed response items. + int32 scrubbed_response_item_count = 4; + + // Audit resource name which is scrubbed. + string target_resource = 5; +} diff --git a/dist/protos/google/rpc/error_details.proto b/dist/protos/google/rpc/error_details.proto new file mode 100644 index 0000000..c489e83 --- /dev/null +++ b/dist/protos/google/rpc/error_details.proto @@ -0,0 +1,285 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/rpc/errdetails;errdetails"; +option java_multiple_files = true; +option java_outer_classname = "ErrorDetailsProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// Describes the cause of the error with structured details. +// +// Example of an error when contacting the "pubsub.googleapis.com" API when it +// is not enabled: +// +// { "reason": "API_DISABLED" +// "domain": "googleapis.com" +// "metadata": { +// "resource": "projects/123", +// "service": "pubsub.googleapis.com" +// } +// } +// +// This response indicates that the pubsub.googleapis.com API is not enabled. +// +// Example of an error that is returned when attempting to create a Spanner +// instance in a region that is out of stock: +// +// { "reason": "STOCKOUT" +// "domain": "spanner.googleapis.com", +// "metadata": { +// "availableRegions": "us-central1,us-east2" +// } +// } +message ErrorInfo { + // The reason of the error. This is a constant value that identifies the + // proximate cause of the error. Error reasons are unique within a particular + // domain of errors. This should be at most 63 characters and match a + // regular expression of `[A-Z][A-Z0-9_]+[A-Z0-9]`, which represents + // UPPER_SNAKE_CASE. 
+  string reason = 1;
+
+  // The logical grouping to which the "reason" belongs. The error domain
+  // is typically the registered service name of the tool or product that
+  // generates the error. Example: "pubsub.googleapis.com". If the error is
+  // generated by some common infrastructure, the error domain must be a
+  // globally unique value that identifies the infrastructure. For Google API
+  // infrastructure, the error domain is "googleapis.com".
+  string domain = 2;
+
+  // Additional structured details about this error.
+  //
+  // Keys should match /[a-zA-Z0-9-_]/ and be limited to 64 characters in
+  // length. When identifying the current value of an exceeded limit, the units
+  // should be contained in the key, not the value. For example, rather than
+  // {"instanceLimit": "100/request"}, should be returned as,
+  // {"instanceLimitPerRequest": "100"}, if the client exceeds the number of
+  // instances that can be created in a single (batch) request.
+  map<string, string> metadata = 3;
+}
+
+// Describes when the clients can retry a failed request. Clients could ignore
+// the recommendation here or retry when this information is missing from error
+// responses.
+//
+// It's always recommended that clients should use exponential backoff when
+// retrying.
+//
+// Clients should wait until `retry_delay` amount of time has passed since
+// receiving the error response before retrying. If retrying requests also
+// fail, clients should use an exponential backoff scheme to gradually increase
+// the delay between retries based on `retry_delay`, until either a maximum
+// number of retries have been reached or a maximum retry delay cap has been
+// reached.
+message RetryInfo {
+  // Clients should wait at least this long between retrying the same request.
+  google.protobuf.Duration retry_delay = 1;
+}
+
+// Describes additional debugging info.
+message DebugInfo {
+  // The stack trace entries indicating where the error occurred.
+  repeated string stack_entries = 1;
+
+  // Additional debugging information provided by the server.
+  string detail = 2;
+}
+
+// Describes how a quota check failed.
+//
+// For example if a daily limit was exceeded for the calling project,
+// a service could respond with a QuotaFailure detail containing the project
+// id and the description of the quota limit that was exceeded. If the
+// calling project hasn't enabled the service in the developer console, then
+// a service could respond with the project id and set `service_disabled`
+// to true.
+//
+// Also see RetryInfo and Help types for other details about handling a
+// quota failure.
+message QuotaFailure {
+  // A message type used to describe a single quota violation. For example, a
+  // daily quota or a custom quota that was exceeded.
+  message Violation {
+    // The subject on which the quota check failed.
+    // For example, "clientip:<ip address of client>" or
+    // "project:<Google developer project id>".
+    string subject = 1;
+
+    // A description of how the quota check failed. Clients can use this
+    // description to find more about the quota configuration in the service's
+    // public documentation, or find the relevant quota limit to adjust through
+    // developer console.
+    //
+    // For example: "Service disabled" or "Daily Limit for read operations
+    // exceeded".
+    string description = 2;
+  }
+
+  // Describes all quota violations.
+  repeated Violation violations = 1;
+}
+
+// Describes what preconditions have failed.
+// +// For example, if an RPC failed because it required the Terms of Service to be +// acknowledged, it could list the terms of service violation in the +// PreconditionFailure message. +message PreconditionFailure { + // A message type used to describe a single precondition failure. + message Violation { + // The type of PreconditionFailure. We recommend using a service-specific + // enum type to define the supported precondition violation subjects. For + // example, "TOS" for "Terms of Service violation". + string type = 1; + + // The subject, relative to the type, that failed. + // For example, "google.com/cloud" relative to the "TOS" type would indicate + // which terms of service is being referenced. + string subject = 2; + + // A description of how the precondition failed. Developers can use this + // description to understand how to fix the failure. + // + // For example: "Terms of service not accepted". + string description = 3; + } + + // Describes all precondition violations. + repeated Violation violations = 1; +} + +// Describes violations in a client request. This error type focuses on the +// syntactic aspects of the request. +message BadRequest { + // A message type used to describe a single bad request field. + message FieldViolation { + // A path that leads to a field in the request body. The value will be a + // sequence of dot-separated identifiers that identify a protocol buffer + // field. + // + // Consider the following: + // + // message CreateContactRequest { + // message EmailAddress { + // enum Type { + // TYPE_UNSPECIFIED = 0; + // HOME = 1; + // WORK = 2; + // } + // + // optional string email = 1; + // repeated EmailType type = 2; + // } + // + // string full_name = 1; + // repeated EmailAddress email_addresses = 2; + // } + // + // In this example, in proto `field` could take one of the following values: + // + // * `full_name` for a violation in the `full_name` value + // * `email_addresses[1].email` for a violation in the `email` field of the + // first `email_addresses` message + // * `email_addresses[3].type[2]` for a violation in the second `type` + // value in the third `email_addresses` message. + // + // In JSON, the same values are represented as: + // + // * `fullName` for a violation in the `fullName` value + // * `emailAddresses[1].email` for a violation in the `email` field of the + // first `emailAddresses` message + // * `emailAddresses[3].type[2]` for a violation in the second `type` + // value in the third `emailAddresses` message. + string field = 1; + + // A description of why the request element is bad. + string description = 2; + } + + // Describes all violations in a client request. + repeated FieldViolation field_violations = 1; +} + +// Contains metadata about the request that clients can attach when filing a bug +// or providing other forms of feedback. +message RequestInfo { + // An opaque string that should only be interpreted by the service generating + // it. For example, it can be used to identify requests in the service's logs. + string request_id = 1; + + // Any data that was used to serve this request. For example, an encrypted + // stack trace that can be sent back to the service provider for debugging. + string serving_data = 2; +} + +// Describes the resource that is being accessed. +message ResourceInfo { + // A name for the type of resource being accessed, e.g. "sql table", + // "cloud storage bucket", "file", "Google calendar"; or the type URL + // of the resource: e.g. "type.googleapis.com/google.pubsub.v1.Topic". 
+ string resource_type = 1; + + // The name of the resource being accessed. For example, a shared calendar + // name: "example.com_4fghdhgsrgh@group.calendar.google.com", if the current + // error is + // [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED]. + string resource_name = 2; + + // The owner of the resource (optional). + // For example, "user:" or "project:". + string owner = 3; + + // Describes what error is encountered when accessing this resource. + // For example, updating a cloud project may require the `writer` permission + // on the developer console project. + string description = 4; +} + +// Provides links to documentation or for performing an out of band action. +// +// For example, if a quota check failed with an error indicating the calling +// project hasn't enabled the accessed service, this can contain a URL pointing +// directly to the right place in the developer console to flip the bit. +message Help { + // Describes a URL link. + message Link { + // Describes what the link offers. + string description = 1; + + // The URL of the link. + string url = 2; + } + + // URL(s) pointing to additional information on handling the current error. + repeated Link links = 1; +} + +// Provides a localized error message that is safe to return to the user +// which can be attached to an RPC error. +message LocalizedMessage { + // The locale used following the specification defined at + // https://www.rfc-editor.org/rfc/bcp/bcp47.txt. + // Examples are: "en-US", "fr-CH", "es-MX" + string locale = 1; + + // The localized error message in the above locale. + string message = 2; +} diff --git a/dist/protos/google/rpc/http.proto b/dist/protos/google/rpc/http.proto new file mode 100644 index 0000000..299a71f --- /dev/null +++ b/dist/protos/google/rpc/http.proto @@ -0,0 +1,64 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +option go_package = "google.golang.org/genproto/googleapis/rpc/http;http"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// Represents an HTTP request. +message HttpRequest { + // The HTTP request method. + string method = 1; + + // The HTTP request URI. + string uri = 2; + + // The HTTP request headers. The ordering of the headers is significant. + // Multiple headers with the same key may present for the request. + repeated HttpHeader headers = 3; + + // The HTTP request body. If the body is not expected, it should be empty. + bytes body = 4; +} + +// Represents an HTTP response. +message HttpResponse { + // The HTTP status code, such as 200 or 404. + int32 status = 1; + + // The HTTP reason phrase, such as "OK" or "Not Found". + string reason = 2; + + // The HTTP response headers. The ordering of the headers is significant. + // Multiple headers with the same key may present for the response. 
+ repeated HttpHeader headers = 3; + + // The HTTP response body. If the body is not expected, it should be empty. + bytes body = 4; +} + +// Represents an HTTP header. +message HttpHeader { + // The HTTP header key. It is case insensitive. + string key = 1; + + // The HTTP header value. + string value = 2; +} diff --git a/dist/protos/google/rpc/status.proto b/dist/protos/google/rpc/status.proto new file mode 100644 index 0000000..923e169 --- /dev/null +++ b/dist/protos/google/rpc/status.proto @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/any.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/rpc/status;status"; +option java_multiple_files = true; +option java_outer_classname = "StatusProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). Each `Status` message contains +// three pieces of data: error code, error message, and error details. +// +// You can find out more about this error model and how to work with it in the +// [API Design Guide](https://cloud.google.com/apis/design/errors). +message Status { + // The status code, which should be an enum value of + // [google.rpc.Code][google.rpc.Code]. + int32 code = 1; + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized + // by the client. + string message = 2; + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + repeated google.protobuf.Any details = 3; +} diff --git a/dist/protos/google/type/calendar_period.proto b/dist/protos/google/type/calendar_period.proto new file mode 100644 index 0000000..82f5690 --- /dev/null +++ b/dist/protos/google/type/calendar_period.proto @@ -0,0 +1,56 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
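For illustration only (hypothetical `example` package and message names, not part of the vendored files): a sketch of the common pattern of embedding `google.rpc.Status`, defined just above, per item in a batch response so partial failures can be reported. The `details` field can carry the `google.rpc` error-detail messages from `error_details.proto`, packed as `google.protobuf.Any`.

```proto
syntax = "proto3";

package example;

import "google/rpc/status.proto";

message BatchWriteResponse {
  message ItemResult {
    // Identifier of the item this result refers to.
    string id = 1;

    // Per-item outcome: code 0 (OK) on success; on failure, `details`
    // may include ErrorInfo, RetryInfo, BadRequest, etc. as Any payloads.
    google.rpc.Status status = 2;
  }

  repeated ItemResult results = 1;
}
```

Keeping the error model inside the payload, rather than failing the whole RPC, is what makes `Status` reusable across REST and gRPC surfaces, as the comment on the message notes.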
+ +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/calendarperiod;calendarperiod"; +option java_multiple_files = true; +option java_outer_classname = "CalendarPeriodProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// A `CalendarPeriod` represents the abstract concept of a time period that has +// a canonical start. Grammatically, "the start of the current +// `CalendarPeriod`." All calendar times begin at midnight UTC. +enum CalendarPeriod { + // Undefined period, raises an error. + CALENDAR_PERIOD_UNSPECIFIED = 0; + + // A day. + DAY = 1; + + // A week. Weeks begin on Monday, following + // [ISO 8601](https://en.wikipedia.org/wiki/ISO_week_date). + WEEK = 2; + + // A fortnight. The first calendar fortnight of the year begins at the start + // of week 1 according to + // [ISO 8601](https://en.wikipedia.org/wiki/ISO_week_date). + FORTNIGHT = 3; + + // A month. + MONTH = 4; + + // A quarter. Quarters start on dates 1-Jan, 1-Apr, 1-Jul, and 1-Oct of each + // year. + QUARTER = 5; + + // A half-year. Half-years start on dates 1-Jan and 1-Jul. + HALF = 6; + + // A year. + YEAR = 7; +} diff --git a/dist/protos/google/type/color.proto b/dist/protos/google/type/color.proto new file mode 100644 index 0000000..5dc85a6 --- /dev/null +++ b/dist/protos/google/type/color.proto @@ -0,0 +1,174 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +import "google/protobuf/wrappers.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/color;color"; +option java_multiple_files = true; +option java_outer_classname = "ColorProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a color in the RGBA color space. This representation is designed +// for simplicity of conversion to/from color representations in various +// languages over compactness. For example, the fields of this representation +// can be trivially provided to the constructor of `java.awt.Color` in Java; it +// can also be trivially provided to UIColor's `+colorWithRed:green:blue:alpha` +// method in iOS; and, with just a little work, it can be easily formatted into +// a CSS `rgba()` string in JavaScript. +// +// This reference page doesn't carry information about the absolute color +// space +// that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, +// DCI-P3, BT.2020, etc.). By default, applications should assume the sRGB color +// space. +// +// When color equality needs to be decided, implementations, unless +// documented otherwise, treat two colors as equal if all their red, +// green, blue, and alpha values each differ by at most 1e-5. +// +// Example (Java): +// +// import com.google.type.Color; +// +// // ... +// public static java.awt.Color fromProto(Color protocolor) { +// float alpha = protocolor.hasAlpha() +// ? 
protocolor.getAlpha().getValue() +// : 1.0; +// +// return new java.awt.Color( +// protocolor.getRed(), +// protocolor.getGreen(), +// protocolor.getBlue(), +// alpha); +// } +// +// public static Color toProto(java.awt.Color color) { +// float red = (float) color.getRed(); +// float green = (float) color.getGreen(); +// float blue = (float) color.getBlue(); +// float denominator = 255.0; +// Color.Builder resultBuilder = +// Color +// .newBuilder() +// .setRed(red / denominator) +// .setGreen(green / denominator) +// .setBlue(blue / denominator); +// int alpha = color.getAlpha(); +// if (alpha != 255) { +// result.setAlpha( +// FloatValue +// .newBuilder() +// .setValue(((float) alpha) / denominator) +// .build()); +// } +// return resultBuilder.build(); +// } +// // ... +// +// Example (iOS / Obj-C): +// +// // ... +// static UIColor* fromProto(Color* protocolor) { +// float red = [protocolor red]; +// float green = [protocolor green]; +// float blue = [protocolor blue]; +// FloatValue* alpha_wrapper = [protocolor alpha]; +// float alpha = 1.0; +// if (alpha_wrapper != nil) { +// alpha = [alpha_wrapper value]; +// } +// return [UIColor colorWithRed:red green:green blue:blue alpha:alpha]; +// } +// +// static Color* toProto(UIColor* color) { +// CGFloat red, green, blue, alpha; +// if (![color getRed:&red green:&green blue:&blue alpha:&alpha]) { +// return nil; +// } +// Color* result = [[Color alloc] init]; +// [result setRed:red]; +// [result setGreen:green]; +// [result setBlue:blue]; +// if (alpha <= 0.9999) { +// [result setAlpha:floatWrapperWithValue(alpha)]; +// } +// [result autorelease]; +// return result; +// } +// // ... +// +// Example (JavaScript): +// +// // ... +// +// var protoToCssColor = function(rgb_color) { +// var redFrac = rgb_color.red || 0.0; +// var greenFrac = rgb_color.green || 0.0; +// var blueFrac = rgb_color.blue || 0.0; +// var red = Math.floor(redFrac * 255); +// var green = Math.floor(greenFrac * 255); +// var blue = Math.floor(blueFrac * 255); +// +// if (!('alpha' in rgb_color)) { +// return rgbToCssColor(red, green, blue); +// } +// +// var alphaFrac = rgb_color.alpha.value || 0.0; +// var rgbParams = [red, green, blue].join(','); +// return ['rgba(', rgbParams, ',', alphaFrac, ')'].join(''); +// }; +// +// var rgbToCssColor = function(red, green, blue) { +// var rgbNumber = new Number((red << 16) | (green << 8) | blue); +// var hexString = rgbNumber.toString(16); +// var missingZeros = 6 - hexString.length; +// var resultBuilder = ['#']; +// for (var i = 0; i < missingZeros; i++) { +// resultBuilder.push('0'); +// } +// resultBuilder.push(hexString); +// return resultBuilder.join(''); +// }; +// +// // ... +message Color { + // The amount of red in the color as a value in the interval [0, 1]. + float red = 1; + + // The amount of green in the color as a value in the interval [0, 1]. + float green = 2; + + // The amount of blue in the color as a value in the interval [0, 1]. + float blue = 3; + + // The fraction of this color that should be applied to the pixel. That is, + // the final pixel color is defined by the equation: + // + // `pixel color = alpha * (this color) + (1.0 - alpha) * (background color)` + // + // This means that a value of 1.0 corresponds to a solid color, whereas + // a value of 0.0 corresponds to a completely transparent color. This + // uses a wrapper message rather than a simple float scalar so that it is + // possible to distinguish between a default value and the value being unset. 
+ // If omitted, this color object is rendered as a solid color + // (as if the alpha value had been explicitly given a value of 1.0). + google.protobuf.FloatValue alpha = 4; +} diff --git a/dist/protos/google/type/date.proto b/dist/protos/google/type/date.proto new file mode 100644 index 0000000..e4e730e --- /dev/null +++ b/dist/protos/google/type/date.proto @@ -0,0 +1,52 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/date;date"; +option java_multiple_files = true; +option java_outer_classname = "DateProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a whole or partial calendar date, such as a birthday. The time of +// day and time zone are either specified elsewhere or are insignificant. The +// date is relative to the Gregorian Calendar. This can represent one of the +// following: +// +// * A full date, with non-zero year, month, and day values +// * A month and day value, with a zero year, such as an anniversary +// * A year on its own, with zero month and day values +// * A year and month value, with a zero day, such as a credit card expiration +// date +// +// Related types are [google.type.TimeOfDay][google.type.TimeOfDay] and +// `google.protobuf.Timestamp`. +message Date { + // Year of the date. Must be from 1 to 9999, or 0 to specify a date without + // a year. + int32 year = 1; + + // Month of a year. Must be from 1 to 12, or 0 to specify a year without a + // month and day. + int32 month = 2; + + // Day of a month. Must be from 1 to 31 and valid for the year and month, or 0 + // to specify a year by itself or a year and month where the day isn't + // significant. + int32 day = 3; +} diff --git a/dist/protos/google/type/datetime.proto b/dist/protos/google/type/datetime.proto new file mode 100644 index 0000000..cfed85d --- /dev/null +++ b/dist/protos/google/type/datetime.proto @@ -0,0 +1,104 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.type; + +import "google/protobuf/duration.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/datetime;datetime"; +option java_multiple_files = true; +option java_outer_classname = "DateTimeProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents civil time (or occasionally physical time). +// +// This type can represent a civil time in one of a few possible ways: +// +// * When utc_offset is set and time_zone is unset: a civil time on a calendar +// day with a particular offset from UTC. +// * When time_zone is set and utc_offset is unset: a civil time on a calendar +// day in a particular time zone. +// * When neither time_zone nor utc_offset is set: a civil time on a calendar +// day in local time. +// +// The date is relative to the Proleptic Gregorian Calendar. +// +// If year is 0, the DateTime is considered not to have a specific year. month +// and day must have valid, non-zero values. +// +// This type may also be used to represent a physical time if all the date and +// time fields are set and either case of the `time_offset` oneof is set. +// Consider using `Timestamp` message for physical time instead. If your use +// case also would like to store the user's timezone, that can be done in +// another field. +// +// This type is more flexible than some applications may want. Make sure to +// document and validate your application's limitations. +message DateTime { + // Optional. Year of date. Must be from 1 to 9999, or 0 if specifying a + // datetime without a year. + int32 year = 1; + + // Required. Month of year. Must be from 1 to 12. + int32 month = 2; + + // Required. Day of month. Must be from 1 to 31 and valid for the year and + // month. + int32 day = 3; + + // Required. Hours of day in 24 hour format. Should be from 0 to 23. An API + // may choose to allow the value "24:00:00" for scenarios like business + // closing time. + int32 hours = 4; + + // Required. Minutes of hour of day. Must be from 0 to 59. + int32 minutes = 5; + + // Required. Seconds of minutes of the time. Must normally be from 0 to 59. An + // API may allow the value 60 if it allows leap-seconds. + int32 seconds = 6; + + // Required. Fractions of seconds in nanoseconds. Must be from 0 to + // 999,999,999. + int32 nanos = 7; + + // Optional. Specifies either the UTC offset or the time zone of the DateTime. + // Choose carefully between them, considering that time zone data may change + // in the future (for example, a country modifies their DST start/end dates, + // and future DateTimes in the affected range had already been stored). + // If omitted, the DateTime is considered to be in local time. + oneof time_offset { + // UTC offset. Must be whole seconds, between -18 hours and +18 hours. + // For example, a UTC offset of -4:00 would be represented as + // { seconds: -14400 }. + google.protobuf.Duration utc_offset = 8; + + // Time zone. + TimeZone time_zone = 9; + } +} + +// Represents a time zone from the +// [IANA Time Zone Database](https://www.iana.org/time-zones). +message TimeZone { + // IANA Time Zone Database time zone, e.g. "America/New_York". + string id = 1; + + // Optional. IANA Time Zone Database version number, e.g. "2019a". 
+ string version = 2; +} diff --git a/dist/protos/google/type/dayofweek.proto b/dist/protos/google/type/dayofweek.proto new file mode 100644 index 0000000..4c80c62 --- /dev/null +++ b/dist/protos/google/type/dayofweek.proto @@ -0,0 +1,50 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/dayofweek;dayofweek"; +option java_multiple_files = true; +option java_outer_classname = "DayOfWeekProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a day of the week. +enum DayOfWeek { + // The day of the week is unspecified. + DAY_OF_WEEK_UNSPECIFIED = 0; + + // Monday + MONDAY = 1; + + // Tuesday + TUESDAY = 2; + + // Wednesday + WEDNESDAY = 3; + + // Thursday + THURSDAY = 4; + + // Friday + FRIDAY = 5; + + // Saturday + SATURDAY = 6; + + // Sunday + SUNDAY = 7; +} diff --git a/dist/protos/google/type/decimal.proto b/dist/protos/google/type/decimal.proto new file mode 100644 index 0000000..beb18a5 --- /dev/null +++ b/dist/protos/google/type/decimal.proto @@ -0,0 +1,95 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/decimal;decimal"; +option java_multiple_files = true; +option java_outer_classname = "DecimalProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// A representation of a decimal value, such as 2.5. Clients may convert values +// into language-native decimal formats, such as Java's [BigDecimal][] or +// Python's [decimal.Decimal][]. +// +// [BigDecimal]: +// https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/math/BigDecimal.html +// [decimal.Decimal]: https://docs.python.org/3/library/decimal.html +message Decimal { + // The decimal value, as a string. + // + // The string representation consists of an optional sign, `+` (`U+002B`) + // or `-` (`U+002D`), followed by a sequence of zero or more decimal digits + // ("the integer"), optionally followed by a fraction, optionally followed + // by an exponent. + // + // The fraction consists of a decimal point followed by zero or more decimal + // digits. The string must contain at least one digit in either the integer + // or the fraction. 
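For illustration only (hypothetical `example` package, not part of the vendored files): a sketch contrasting the civil-time types defined above with `google.protobuf.Timestamp`. `Date` carries only a calendar date, `DateTime` can pin a civil time to a `TimeZone` or UTC offset, and `Timestamp` is an absolute instant.

```proto
syntax = "proto3";

package example;

import "google/protobuf/timestamp.proto";
import "google/type/date.proto";
import "google/type/datetime.proto";

message Appointment {
  // Absolute instant (physical time); no calendar or zone semantics.
  google.protobuf.Timestamp created_at = 1;

  // Calendar date only, e.g. a birthday or due date; no time of day.
  google.type.Date due_date = 2;

  // Civil time; using the time_zone oneof case keeps the value correct
  // even if the zone's UTC-offset rules change later.
  google.type.DateTime starts_at = 3;
}
```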
The number formed by the sign, the integer and the + // fraction is referred to as the significand. + // + // The exponent consists of the character `e` (`U+0065`) or `E` (`U+0045`) + // followed by one or more decimal digits. + // + // Services **should** normalize decimal values before storing them by: + // + // - Removing an explicitly-provided `+` sign (`+2.5` -> `2.5`). + // - Replacing a zero-length integer value with `0` (`.5` -> `0.5`). + // - Coercing the exponent character to lower-case (`2.5E8` -> `2.5e8`). + // - Removing an explicitly-provided zero exponent (`2.5e0` -> `2.5`). + // + // Services **may** perform additional normalization based on its own needs + // and the internal decimal implementation selected, such as shifting the + // decimal point and exponent value together (example: `2.5e-1` <-> `0.25`). + // Additionally, services **may** preserve trailing zeroes in the fraction + // to indicate increased precision, but are not required to do so. + // + // Note that only the `.` character is supported to divide the integer + // and the fraction; `,` **should not** be supported regardless of locale. + // Additionally, thousand separators **should not** be supported. If a + // service does support them, values **must** be normalized. + // + // The ENBF grammar is: + // + // DecimalString = + // [Sign] Significand [Exponent]; + // + // Sign = '+' | '-'; + // + // Significand = + // Digits ['.'] [Digits] | [Digits] '.' Digits; + // + // Exponent = ('e' | 'E') [Sign] Digits; + // + // Digits = { '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' }; + // + // Services **should** clearly document the range of supported values, the + // maximum supported precision (total number of digits), and, if applicable, + // the scale (number of digits after the decimal point), as well as how it + // behaves when receiving out-of-bounds values. + // + // Services **may** choose to accept values passed as input even when the + // value has a higher precision or scale than the service supports, and + // **should** round the value to fit the supported scale. Alternatively, the + // service **may** error with `400 Bad Request` (`INVALID_ARGUMENT` in gRPC) + // if precision would be lost. + // + // Services **should** error with `400 Bad Request` (`INVALID_ARGUMENT` in + // gRPC) if the service receives a value outside of the supported range. + string value = 1; +} diff --git a/dist/protos/google/type/expr.proto b/dist/protos/google/type/expr.proto new file mode 100644 index 0000000..af0778c --- /dev/null +++ b/dist/protos/google/type/expr.proto @@ -0,0 +1,73 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
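For illustration only (hypothetical `example` package, not part of the vendored files): a sketch of where `google.type.Decimal` is preferable to binary floating point. The value travels as a string, so quantities like "0.1" round-trip exactly and services can apply the normalization rules described above.

```proto
syntax = "proto3";

package example;

import "google/type/decimal.proto";

message LineItem {
  // Exact decimal quantity, e.g. value: "2.5"; parse with a language-native
  // decimal type such as Java's BigDecimal or Python's decimal.Decimal.
  google.type.Decimal quantity = 1;

  // A double is subject to binary rounding (0.1 + 0.2 != 0.3 exactly),
  // which is the problem the string-based Decimal type avoids.
  double approximate_quantity = 2;
}
```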
+ +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/expr;expr"; +option java_multiple_files = true; +option java_outer_classname = "ExprProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a textual expression in the Common Expression Language (CEL) +// syntax. CEL is a C-like expression language. The syntax and semantics of CEL +// are documented at https://github.com/google/cel-spec. +// +// Example (Comparison): +// +// title: "Summary size limit" +// description: "Determines if a summary is less than 100 chars" +// expression: "document.summary.size() < 100" +// +// Example (Equality): +// +// title: "Requestor is owner" +// description: "Determines if requestor is the document owner" +// expression: "document.owner == request.auth.claims.email" +// +// Example (Logic): +// +// title: "Public documents" +// description: "Determine whether the document should be publicly visible" +// expression: "document.type != 'private' && document.type != 'internal'" +// +// Example (Data Manipulation): +// +// title: "Notification string" +// description: "Create a notification string with a timestamp." +// expression: "'New message received at ' + string(document.create_time)" +// +// The exact variables and functions that may be referenced within an expression +// are determined by the service that evaluates it. See the service +// documentation for additional information. +message Expr { + // Textual representation of an expression in Common Expression Language + // syntax. + string expression = 1; + + // Optional. Title for the expression, i.e. a short string describing + // its purpose. This can be used e.g. in UIs which allow to enter the + // expression. + string title = 2; + + // Optional. Description of the expression. This is a longer text which + // describes the expression, e.g. when hovered over it in a UI. + string description = 3; + + // Optional. String indicating the location of the expression for error + // reporting, e.g. a file name and a position in the file. + string location = 4; +} diff --git a/dist/protos/google/type/fraction.proto b/dist/protos/google/type/fraction.proto new file mode 100644 index 0000000..6c5ae6e --- /dev/null +++ b/dist/protos/google/type/fraction.proto @@ -0,0 +1,33 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/fraction;fraction"; +option java_multiple_files = true; +option java_outer_classname = "FractionProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a fraction in terms of a numerator divided by a denominator. +message Fraction { + // The numerator in the fraction, e.g. 2 in 2/3. + int64 numerator = 1; + + // The value by which the numerator is divided, e.g. 3 in 2/3. Must be + // positive. 
+ int64 denominator = 2; +} diff --git a/dist/protos/google/type/interval.proto b/dist/protos/google/type/interval.proto new file mode 100644 index 0000000..9702324 --- /dev/null +++ b/dist/protos/google/type/interval.proto @@ -0,0 +1,46 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/interval;interval"; +option java_multiple_files = true; +option java_outer_classname = "IntervalProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a time interval, encoded as a Timestamp start (inclusive) and a +// Timestamp end (exclusive). +// +// The start must be less than or equal to the end. +// When the start equals the end, the interval is empty (matches no time). +// When both start and end are unspecified, the interval matches any time. +message Interval { + // Optional. Inclusive start of the interval. + // + // If specified, a Timestamp matching this interval will have to be the same + // or after the start. + google.protobuf.Timestamp start_time = 1; + + // Optional. Exclusive end of the interval. + // + // If specified, a Timestamp matching this interval will have to be before the + // end. + google.protobuf.Timestamp end_time = 2; +} diff --git a/dist/protos/google/type/latlng.proto b/dist/protos/google/type/latlng.proto new file mode 100644 index 0000000..9231456 --- /dev/null +++ b/dist/protos/google/type/latlng.proto @@ -0,0 +1,37 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/latlng;latlng"; +option java_multiple_files = true; +option java_outer_classname = "LatLngProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// An object that represents a latitude/longitude pair. This is expressed as a +// pair of doubles to represent degrees latitude and degrees longitude. Unless +// specified otherwise, this must conform to the +// WGS84 +// standard. Values must be within normalized ranges. +message LatLng { + // The latitude in degrees. It must be in the range [-90.0, +90.0]. + double latitude = 1; + + // The longitude in degrees. It must be in the range [-180.0, +180.0]. 
+ double longitude = 2; +} diff --git a/dist/protos/google/type/localized_text.proto b/dist/protos/google/type/localized_text.proto new file mode 100644 index 0000000..5c6922b --- /dev/null +++ b/dist/protos/google/type/localized_text.proto @@ -0,0 +1,36 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/localized_text;localized_text"; +option java_multiple_files = true; +option java_outer_classname = "LocalizedTextProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Localized variant of a text in a particular language. +message LocalizedText { + // Localized string in the language corresponding to `language_code' below. + string text = 1; + + // The text's BCP-47 language code, such as "en-US" or "sr-Latn". + // + // For more information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + string language_code = 2; +} diff --git a/dist/protos/google/type/money.proto b/dist/protos/google/type/money.proto new file mode 100644 index 0000000..98d6494 --- /dev/null +++ b/dist/protos/google/type/money.proto @@ -0,0 +1,42 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/money;money"; +option java_multiple_files = true; +option java_outer_classname = "MoneyProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents an amount of money with its currency type. +message Money { + // The three-letter currency code defined in ISO 4217. + string currency_code = 1; + + // The whole units of the amount. + // For example if `currencyCode` is `"USD"`, then 1 unit is one US dollar. + int64 units = 2; + + // Number of nano (10^-9) units of the amount. + // The value must be between -999,999,999 and +999,999,999 inclusive. + // If `units` is positive, `nanos` must be positive or zero. + // If `units` is zero, `nanos` can be positive, zero, or negative. + // If `units` is negative, `nanos` must be negative or zero. + // For example $-1.75 is represented as `units`=-1 and `nanos`=-750,000,000. 
+ int32 nanos = 3; +} diff --git a/dist/protos/google/type/month.proto b/dist/protos/google/type/month.proto new file mode 100644 index 0000000..99e7551 --- /dev/null +++ b/dist/protos/google/type/month.proto @@ -0,0 +1,65 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/month;month"; +option java_multiple_files = true; +option java_outer_classname = "MonthProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a month in the Gregorian calendar. +enum Month { + // The unspecified month. + MONTH_UNSPECIFIED = 0; + + // The month of January. + JANUARY = 1; + + // The month of February. + FEBRUARY = 2; + + // The month of March. + MARCH = 3; + + // The month of April. + APRIL = 4; + + // The month of May. + MAY = 5; + + // The month of June. + JUNE = 6; + + // The month of July. + JULY = 7; + + // The month of August. + AUGUST = 8; + + // The month of September. + SEPTEMBER = 9; + + // The month of October. + OCTOBER = 10; + + // The month of November. + NOVEMBER = 11; + + // The month of December. + DECEMBER = 12; +} diff --git a/dist/protos/google/type/phone_number.proto b/dist/protos/google/type/phone_number.proto new file mode 100644 index 0000000..7bbb7d8 --- /dev/null +++ b/dist/protos/google/type/phone_number.proto @@ -0,0 +1,113 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/phone_number;phone_number"; +option java_multiple_files = true; +option java_outer_classname = "PhoneNumberProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// An object representing a phone number, suitable as an API wire format. +// +// This representation: +// +// - should not be used for locale-specific formatting of a phone number, such +// as "+1 (650) 253-0000 ext. 123" +// +// - is not designed for efficient storage +// - may not be suitable for dialing - specialized libraries (see references) +// should be used to parse the number for that purpose +// +// To do something meaningful with this number, such as format it for various +// use-cases, convert it to an `i18n.phonenumbers.PhoneNumber` object first. 
+// +// For instance, in Java this would be: +// +// com.google.type.PhoneNumber wireProto = +// com.google.type.PhoneNumber.newBuilder().build(); +// com.google.i18n.phonenumbers.Phonenumber.PhoneNumber phoneNumber = +// PhoneNumberUtil.getInstance().parse(wireProto.getE164Number(), "ZZ"); +// if (!wireProto.getExtension().isEmpty()) { +// phoneNumber.setExtension(wireProto.getExtension()); +// } +// +// Reference(s): +// - https://github.com/google/libphonenumber +message PhoneNumber { + // An object representing a short code, which is a phone number that is + // typically much shorter than regular phone numbers and can be used to + // address messages in MMS and SMS systems, as well as for abbreviated dialing + // (e.g. "Text 611 to see how many minutes you have remaining on your plan."). + // + // Short codes are restricted to a region and are not internationally + // dialable, which means the same short code can exist in different regions, + // with different usage and pricing, even if those regions share the same + // country calling code (e.g. US and CA). + message ShortCode { + // Required. The BCP-47 region code of the location where calls to this + // short code can be made, such as "US" and "BB". + // + // Reference(s): + // - http://www.unicode.org/reports/tr35/#unicode_region_subtag + string region_code = 1; + + // Required. The short code digits, without a leading plus ('+') or country + // calling code, e.g. "611". + string number = 2; + } + + // Required. Either a regular number, or a short code. New fields may be + // added to the oneof below in the future, so clients should ignore phone + // numbers for which none of the fields they coded against are set. + oneof kind { + // The phone number, represented as a leading plus sign ('+'), followed by a + // phone number that uses a relaxed ITU E.164 format consisting of the + // country calling code (1 to 3 digits) and the subscriber number, with no + // additional spaces or formatting, e.g.: + // - correct: "+15552220123" + // - incorrect: "+1 (555) 222-01234 x123". + // + // The ITU E.164 format limits the latter to 12 digits, but in practice not + // all countries respect that, so we relax that restriction here. + // National-only numbers are not allowed. + // + // References: + // - https://www.itu.int/rec/T-REC-E.164-201011-I + // - https://en.wikipedia.org/wiki/E.164. + // - https://en.wikipedia.org/wiki/List_of_country_calling_codes + string e164_number = 1; + + // A short code. + // + // Reference(s): + // - https://en.wikipedia.org/wiki/Short_code + ShortCode short_code = 2; + } + + // The phone number's extension. The extension is not standardized in ITU + // recommendations, except for being defined as a series of numbers with a + // maximum length of 40 digits. Other than digits, some other dialing + // characters such as ',' (indicating a wait) or '#' may be stored here. + // + // Note that no regions currently use extensions with short codes, so this + // field is normally only set in conjunction with an E.164 number. It is held + // separately from the E.164 number to allow for short code extensions in the + // future. 
+ string extension = 3; +} diff --git a/dist/protos/google/type/postal_address.proto b/dist/protos/google/type/postal_address.proto new file mode 100644 index 0000000..c57c7c3 --- /dev/null +++ b/dist/protos/google/type/postal_address.proto @@ -0,0 +1,134 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/postaladdress;postaladdress"; +option java_multiple_files = true; +option java_outer_classname = "PostalAddressProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a postal address, e.g. for postal delivery or payments addresses. +// Given a postal address, a postal service can deliver items to a premise, P.O. +// Box or similar. +// It is not intended to model geographical locations (roads, towns, +// mountains). +// +// In typical usage an address would be created via user input or from importing +// existing data, depending on the type of process. +// +// Advice on address input / editing: +// - Use an i18n-ready address widget such as +// https://github.com/google/libaddressinput) +// - Users should not be presented with UI elements for input or editing of +// fields outside countries where that field is used. +// +// For more guidance on how to use this schema, please see: +// https://support.google.com/business/answer/6397478 +message PostalAddress { + // The schema revision of the `PostalAddress`. This must be set to 0, which is + // the latest revision. + // + // All new revisions **must** be backward compatible with old revisions. + int32 revision = 1; + + // Required. CLDR region code of the country/region of the address. This + // is never inferred and it is up to the user to ensure the value is + // correct. See http://cldr.unicode.org/ and + // http://www.unicode.org/cldr/charts/30/supplemental/territory_information.html + // for details. Example: "CH" for Switzerland. + string region_code = 2; + + // Optional. BCP-47 language code of the contents of this address (if + // known). This is often the UI language of the input form or is expected + // to match one of the languages used in the address' country/region, or their + // transliterated equivalents. + // This can affect formatting in certain countries, but is not critical + // to the correctness of the data and will never affect any validation or + // other non-formatting related operations. + // + // If this value is not known, it should be omitted (rather than specifying a + // possibly incorrect default). + // + // Examples: "zh-Hant", "ja", "ja-Latn", "en". + string language_code = 3; + + // Optional. Postal code of the address. Not all countries use or require + // postal codes to be present, but where they are used, they may trigger + // additional validation with other parts of the address (e.g. state/zip + // validation in the U.S.A.). + string postal_code = 4; + + // Optional. 
Additional, country-specific, sorting code. This is not used + // in most regions. Where it is used, the value is either a string like + // "CEDEX", optionally followed by a number (e.g. "CEDEX 7"), or just a number + // alone, representing the "sector code" (Jamaica), "delivery area indicator" + // (Malawi) or "post office indicator" (e.g. Côte d'Ivoire). + string sorting_code = 5; + + // Optional. Highest administrative subdivision which is used for postal + // addresses of a country or region. + // For example, this can be a state, a province, an oblast, or a prefecture. + // Specifically, for Spain this is the province and not the autonomous + // community (e.g. "Barcelona" and not "Catalonia"). + // Many countries don't use an administrative area in postal addresses. E.g. + // in Switzerland this should be left unpopulated. + string administrative_area = 6; + + // Optional. Generally refers to the city/town portion of the address. + // Examples: US city, IT comune, UK post town. + // In regions of the world where localities are not well defined or do not fit + // into this structure well, leave locality empty and use address_lines. + string locality = 7; + + // Optional. Sublocality of the address. + // For example, this can be neighborhoods, boroughs, districts. + string sublocality = 8; + + // Unstructured address lines describing the lower levels of an address. + // + // Because values in address_lines do not have type information and may + // sometimes contain multiple values in a single field (e.g. + // "Austin, TX"), it is important that the line order is clear. The order of + // address lines should be "envelope order" for the country/region of the + // address. In places where this can vary (e.g. Japan), address_language is + // used to make it explicit (e.g. "ja" for large-to-small ordering and + // "ja-Latn" or "en" for small-to-large). This way, the most specific line of + // an address can be selected based on the language. + // + // The minimum permitted structural representation of an address consists + // of a region_code with all remaining information placed in the + // address_lines. It would be possible to format such an address very + // approximately without geocoding, but no semantic reasoning could be + // made about any of the address components until it was at least + // partially resolved. + // + // Creating an address only containing a region_code and address_lines, and + // then geocoding is the recommended way to handle completely unstructured + // addresses (as opposed to guessing which parts of the address should be + // localities or administrative areas). + repeated string address_lines = 9; + + // Optional. The recipient at the address. + // This field may, under certain circumstances, contain multiline information. + // For example, it might contain "care of" information. + repeated string recipients = 10; + + // Optional. The name of the organization at the address. + string organization = 11; +} diff --git a/dist/protos/google/type/quaternion.proto b/dist/protos/google/type/quaternion.proto new file mode 100644 index 0000000..dfb822d --- /dev/null +++ b/dist/protos/google/type/quaternion.proto @@ -0,0 +1,94 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/quaternion;quaternion"; +option java_multiple_files = true; +option java_outer_classname = "QuaternionProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// A quaternion is defined as the quotient of two directed lines in a +// three-dimensional space or equivalently as the quotient of two Euclidean +// vectors (https://en.wikipedia.org/wiki/Quaternion). +// +// Quaternions are often used in calculations involving three-dimensional +// rotations (https://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation), +// as they provide greater mathematical robustness by avoiding the gimbal lock +// problems that can be encountered when using Euler angles +// (https://en.wikipedia.org/wiki/Gimbal_lock). +// +// Quaternions are generally represented in this form: +// +// w + xi + yj + zk +// +// where x, y, z, and w are real numbers, and i, j, and k are three imaginary +// numbers. +// +// Our naming choice `(x, y, z, w)` comes from the desire to avoid confusion for +// those interested in the geometric properties of the quaternion in the 3D +// Cartesian space. Other texts often use alternative names or subscripts, such +// as `(a, b, c, d)`, `(1, i, j, k)`, or `(0, 1, 2, 3)`, which are perhaps +// better suited for mathematical interpretations. +// +// To avoid any confusion, as well as to maintain compatibility with a large +// number of software libraries, the quaternions represented using the protocol +// buffer below *must* follow the Hamilton convention, which defines `ij = k` +// (i.e. a right-handed algebra), and therefore: +// +// i^2 = j^2 = k^2 = ijk = −1 +// ij = −ji = k +// jk = −kj = i +// ki = −ik = j +// +// Please DO NOT use this to represent quaternions that follow the JPL +// convention, or any of the other quaternion flavors out there. +// +// Definitions: +// +// - Quaternion norm (or magnitude): `sqrt(x^2 + y^2 + z^2 + w^2)`. +// - Unit (or normalized) quaternion: a quaternion whose norm is 1. +// - Pure quaternion: a quaternion whose scalar component (`w`) is 0. +// - Rotation quaternion: a unit quaternion used to represent rotation. +// - Orientation quaternion: a unit quaternion used to represent orientation. +// +// A quaternion can be normalized by dividing it by its norm. The resulting +// quaternion maintains the same direction, but has a norm of 1, i.e. it moves +// on the unit sphere. This is generally necessary for rotation and orientation +// quaternions, to avoid rounding errors: +// https://en.wikipedia.org/wiki/Rotation_formalisms_in_three_dimensions +// +// Note that `(x, y, z, w)` and `(-x, -y, -z, -w)` represent the same rotation, +// but normalization would be even more useful, e.g. for comparison purposes, if +// it would produce a unique representation. It is thus recommended that `w` be +// kept positive, which can be achieved by changing all the signs when `w` is +// negative. +// +message Quaternion { + // The x component. 
+ double x = 1; + + // The y component. + double y = 2; + + // The z component. + double z = 3; + + // The scalar component. + double w = 4; +} diff --git a/dist/protos/google/type/timeofday.proto b/dist/protos/google/type/timeofday.proto new file mode 100644 index 0000000..5cb48aa --- /dev/null +++ b/dist/protos/google/type/timeofday.proto @@ -0,0 +1,44 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/timeofday;timeofday"; +option java_multiple_files = true; +option java_outer_classname = "TimeOfDayProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a time of day. The date and time zone are either not significant +// or are specified elsewhere. An API may choose to allow leap seconds. Related +// types are [google.type.Date][google.type.Date] and +// `google.protobuf.Timestamp`. +message TimeOfDay { + // Hours of day in 24 hour format. Should be from 0 to 23. An API may choose + // to allow the value "24:00:00" for scenarios like business closing time. + int32 hours = 1; + + // Minutes of hour of day. Must be from 0 to 59. + int32 minutes = 2; + + // Seconds of minutes of the time. Must normally be from 0 to 59. An API may + // allow the value 60 if it allows leap-seconds. + int32 seconds = 3; + + // Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999. + int32 nanos = 4; +} diff --git a/dist/protos/http.d.ts b/dist/protos/http.d.ts new file mode 100644 index 0000000..1f03560 --- /dev/null +++ b/dist/protos/http.d.ts @@ -0,0 +1,347 @@ +import * as $protobuf from "protobufjs"; +/** Namespace google. */ +export namespace google { + + /** Namespace api. */ + namespace api { + + /** Properties of a Http. */ + interface IHttp { + + /** Http rules */ + rules?: (google.api.IHttpRule[]|null); + + /** Http fully_decode_reserved_expansion */ + fully_decode_reserved_expansion?: (boolean|null); + } + + /** Represents a Http. */ + class Http implements IHttp { + + /** + * Constructs a new Http. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttp); + + /** Http rules. */ + public rules: google.api.IHttpRule[]; + + /** Http fully_decode_reserved_expansion. */ + public fully_decode_reserved_expansion: boolean; + + /** + * Creates a new Http instance using the specified properties. + * @param [properties] Properties to set + * @returns Http instance + */ + public static create(properties?: google.api.IHttp): google.api.Http; + + /** + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Http message, length delimited. 
Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Http message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Http; + + /** + * Decodes a Http message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Http; + + /** + * Verifies a Http message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Http message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Http + */ + public static fromObject(object: { [k: string]: any }): google.api.Http; + + /** + * Creates a plain object from a Http message. Also converts values to other types if specified. + * @param message Http + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.Http, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Http to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a HttpRule. */ + interface IHttpRule { + + /** HttpRule selector */ + selector?: (string|null); + + /** HttpRule get */ + get?: (string|null); + + /** HttpRule put */ + put?: (string|null); + + /** HttpRule post */ + post?: (string|null); + + /** HttpRule delete */ + "delete"?: (string|null); + + /** HttpRule patch */ + patch?: (string|null); + + /** HttpRule custom */ + custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body */ + body?: (string|null); + + /** HttpRule response_body */ + response_body?: (string|null); + + /** HttpRule additional_bindings */ + additional_bindings?: (google.api.IHttpRule[]|null); + } + + /** Represents a HttpRule. */ + class HttpRule implements IHttpRule { + + /** + * Constructs a new HttpRule. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttpRule); + + /** HttpRule selector. */ + public selector: string; + + /** HttpRule get. */ + public get: string; + + /** HttpRule put. */ + public put: string; + + /** HttpRule post. */ + public post: string; + + /** HttpRule delete. */ + public delete: string; + + /** HttpRule patch. */ + public patch: string; + + /** HttpRule custom. */ + public custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body. */ + public body: string; + + /** HttpRule response_body. */ + public response_body: string; + + /** HttpRule additional_bindings. */ + public additional_bindings: google.api.IHttpRule[]; + + /** HttpRule pattern. 
*/ + public pattern?: ("get"|"put"|"post"|"delete"|"patch"|"custom"); + + /** + * Creates a new HttpRule instance using the specified properties. + * @param [properties] Properties to set + * @returns HttpRule instance + */ + public static create(properties?: google.api.IHttpRule): google.api.HttpRule; + + /** + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a HttpRule message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.HttpRule; + + /** + * Decodes a HttpRule message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.HttpRule; + + /** + * Verifies a HttpRule message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns HttpRule + */ + public static fromObject(object: { [k: string]: any }): google.api.HttpRule; + + /** + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @param message HttpRule + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.HttpRule, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this HttpRule to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a CustomHttpPattern. */ + interface ICustomHttpPattern { + + /** CustomHttpPattern kind */ + kind?: (string|null); + + /** CustomHttpPattern path */ + path?: (string|null); + } + + /** Represents a CustomHttpPattern. */ + class CustomHttpPattern implements ICustomHttpPattern { + + /** + * Constructs a new CustomHttpPattern. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ICustomHttpPattern); + + /** CustomHttpPattern kind. */ + public kind: string; + + /** CustomHttpPattern path. */ + public path: string; + + /** + * Creates a new CustomHttpPattern instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns CustomHttpPattern instance + */ + public static create(properties?: google.api.ICustomHttpPattern): google.api.CustomHttpPattern; + + /** + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CustomHttpPattern; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CustomHttpPattern; + + /** + * Verifies a CustomHttpPattern message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CustomHttpPattern + */ + public static fromObject(object: { [k: string]: any }): google.api.CustomHttpPattern; + + /** + * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * @param message CustomHttpPattern + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.CustomHttpPattern, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CustomHttpPattern to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } +} diff --git a/dist/protos/http.js b/dist/protos/http.js new file mode 100644 index 0000000..8ad3fc0 --- /dev/null +++ b/dist/protos/http.js @@ -0,0 +1 @@ +(e=>{"function"==typeof define&&define.amd?define(["protobufjs/minimal"],e):"function"==typeof require&&"object"==typeof module&&module&&module.exports&&(module.exports=e(require("protobufjs/minimal")))})(function(e){var t,n,r,i=e.Reader,o=e.Writer,l=e.util,s=e.roots.default||(e.roots.default={});function a(e){if(this.rules=[],e)for(var t=Object.keys(e),n=0;n>>3){case 1:r.rules&&r.rules.length||(r.rules=[]),r.rules.push(s.google.api.HttpRule.decode(e,e.uint32()));break;case 2:r.fully_decode_reserved_expansion=e.bool();break;default:e.skipType(7&o)}}return r},a.decodeDelimited=function(e){return e instanceof i||(e=new i(e)),this.decode(e,e.uint32())},a.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.rules&&e.hasOwnProperty("rules")){if(!Array.isArray(e.rules))return"rules: array expected";for(var t=0;t>>3){case 1:r.selector=e.string();break;case 2:r.get=e.string();break;case 3:r.put=e.string();break;case 4:r.post=e.string();break;case 5:r.delete=e.string();break;case 6:r.patch=e.string();break;case 8:r.custom=s.google.api.CustomHttpPattern.decode(e,e.uint32());break;case 7:r.body=e.string();break;case 12:r.response_body=e.string();break;case 11:r.additional_bindings&&r.additional_bindings.length||(r.additional_bindings=[]),r.additional_bindings.push(s.google.api.HttpRule.decode(e,e.uint32()));break;default:e.skipType(7&o)}}return r},p.decodeDelimited=function(e){return e instanceof i||(e=new i(e)),this.decode(e,e.uint32())},p.verify=function(e){if("object"!=typeof e||null===e)return"object expected";var t={};if(null!=e.selector&&e.hasOwnProperty("selector")&&!l.isString(e.selector))return"selector: string expected";if(null!=e.get&&e.hasOwnProperty("get")&&(t.pattern=1,!l.isString(e.get)))return"get: string expected";if(null!=e.put&&e.hasOwnProperty("put")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!l.isString(e.put))return"put: string expected"}if(null!=e.post&&e.hasOwnProperty("post")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!l.isString(e.post))return"post: string expected"}if(null!=e.delete&&e.hasOwnProperty("delete")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!l.isString(e.delete))return"delete: string expected"}if(null!=e.patch&&e.hasOwnProperty("patch")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!l.isString(e.patch))return"patch: string expected"}if(null!=e.custom&&e.hasOwnProperty("custom")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,n=s.google.api.CustomHttpPattern.verify(e.custom))return"custom."+n}if(null!=e.body&&e.hasOwnProperty("body")&&!l.isString(e.body))return"body: string expected";if(null!=e.response_body&&e.hasOwnProperty("response_body")&&!l.isString(e.response_body))return"response_body: string expected";if(null!=e.additional_bindings&&e.hasOwnProperty("additional_bindings")){if(!Array.isArray(e.additional_bindings))return"additional_bindings: array expected";for(var n,r=0;r>>3){case 1:r.kind=e.string();break;case 2:r.path=e.string();break;default:e.skipType(7&o)}}return r},u.decodeDelimited=function(e){return e instanceof i||(e=new i(e)),this.decode(e,e.uint32())},u.verify=function(e){return"object"!=typeof e||null===e?"object 
expected":null!=e.kind&&e.hasOwnProperty("kind")&&!l.isString(e.kind)?"kind: string expected":null!=e.path&&e.hasOwnProperty("path")&&!l.isString(e.path)?"path: string expected":null},u.fromObject=function(e){var t;return e instanceof s.google.api.CustomHttpPattern?e:(t=new s.google.api.CustomHttpPattern,null!=e.kind&&(t.kind=String(e.kind)),null!=e.path&&(t.path=String(e.path)),t)},u.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.kind="",n.path=""),null!=e.kind&&e.hasOwnProperty("kind")&&(n.kind=e.kind),null!=e.path&&e.hasOwnProperty("path")&&(n.path=e.path),n},u.prototype.toJSON=function(){return this.constructor.toObject(this,e.util.toJSONOptions)},u),n),r),s}); \ No newline at end of file diff --git a/dist/protos/iam_service.d.ts b/dist/protos/iam_service.d.ts new file mode 100644 index 0000000..37203e4 --- /dev/null +++ b/dist/protos/iam_service.d.ts @@ -0,0 +1,5035 @@ +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import Long = require('long'); +import * as $protobuf from "protobufjs"; +/** Namespace google. */ +export namespace google { + + /** Namespace iam. */ + namespace iam { + + /** Namespace v1. */ + namespace v1 { + + /** Represents a IAMPolicy */ + class IAMPolicy extends $protobuf.rpc.Service { + + /** + * Constructs a new IAMPolicy service. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new IAMPolicy service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): IAMPolicy; + + /** + * Calls SetIamPolicy. + * @param request SetIamPolicyRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Policy + */ + public setIamPolicy(request: google.iam.v1.ISetIamPolicyRequest, callback: google.iam.v1.IAMPolicy.SetIamPolicyCallback): void; + + /** + * Calls SetIamPolicy. + * @param request SetIamPolicyRequest message or plain object + * @returns Promise + */ + public setIamPolicy(request: google.iam.v1.ISetIamPolicyRequest): Promise; + + /** + * Calls GetIamPolicy. + * @param request GetIamPolicyRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Policy + */ + public getIamPolicy(request: google.iam.v1.IGetIamPolicyRequest, callback: google.iam.v1.IAMPolicy.GetIamPolicyCallback): void; + + /** + * Calls GetIamPolicy. 
+ * @param request GetIamPolicyRequest message or plain object + * @returns Promise + */ + public getIamPolicy(request: google.iam.v1.IGetIamPolicyRequest): Promise; + + /** + * Calls TestIamPermissions. + * @param request TestIamPermissionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and TestIamPermissionsResponse + */ + public testIamPermissions(request: google.iam.v1.ITestIamPermissionsRequest, callback: google.iam.v1.IAMPolicy.TestIamPermissionsCallback): void; + + /** + * Calls TestIamPermissions. + * @param request TestIamPermissionsRequest message or plain object + * @returns Promise + */ + public testIamPermissions(request: google.iam.v1.ITestIamPermissionsRequest): Promise; + } + + namespace IAMPolicy { + + /** + * Callback as used by {@link google.iam.v1.IAMPolicy#setIamPolicy}. + * @param error Error, if any + * @param [response] Policy + */ + type SetIamPolicyCallback = (error: (Error|null), response?: google.iam.v1.Policy) => void; + + /** + * Callback as used by {@link google.iam.v1.IAMPolicy#getIamPolicy}. + * @param error Error, if any + * @param [response] Policy + */ + type GetIamPolicyCallback = (error: (Error|null), response?: google.iam.v1.Policy) => void; + + /** + * Callback as used by {@link google.iam.v1.IAMPolicy#testIamPermissions}. + * @param error Error, if any + * @param [response] TestIamPermissionsResponse + */ + type TestIamPermissionsCallback = (error: (Error|null), response?: google.iam.v1.TestIamPermissionsResponse) => void; + } + + /** Properties of a SetIamPolicyRequest. */ + interface ISetIamPolicyRequest { + + /** SetIamPolicyRequest resource */ + resource?: (string|null); + + /** SetIamPolicyRequest policy */ + policy?: (google.iam.v1.IPolicy|null); + } + + /** Represents a SetIamPolicyRequest. */ + class SetIamPolicyRequest implements ISetIamPolicyRequest { + + /** + * Constructs a new SetIamPolicyRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.ISetIamPolicyRequest); + + /** SetIamPolicyRequest resource. */ + public resource: string; + + /** SetIamPolicyRequest policy. */ + public policy?: (google.iam.v1.IPolicy|null); + + /** + * Creates a new SetIamPolicyRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns SetIamPolicyRequest instance + */ + public static create(properties?: google.iam.v1.ISetIamPolicyRequest): google.iam.v1.SetIamPolicyRequest; + + /** + * Encodes the specified SetIamPolicyRequest message. Does not implicitly {@link google.iam.v1.SetIamPolicyRequest.verify|verify} messages. + * @param message SetIamPolicyRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.ISetIamPolicyRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SetIamPolicyRequest message, length delimited. Does not implicitly {@link google.iam.v1.SetIamPolicyRequest.verify|verify} messages. + * @param message SetIamPolicyRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.ISetIamPolicyRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SetIamPolicyRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SetIamPolicyRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.SetIamPolicyRequest; + + /** + * Decodes a SetIamPolicyRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SetIamPolicyRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.SetIamPolicyRequest; + + /** + * Verifies a SetIamPolicyRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SetIamPolicyRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SetIamPolicyRequest + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.SetIamPolicyRequest; + + /** + * Creates a plain object from a SetIamPolicyRequest message. Also converts values to other types if specified. + * @param message SetIamPolicyRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.SetIamPolicyRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SetIamPolicyRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a GetIamPolicyRequest. */ + interface IGetIamPolicyRequest { + + /** GetIamPolicyRequest resource */ + resource?: (string|null); + + /** GetIamPolicyRequest options */ + options?: (google.iam.v1.IGetPolicyOptions|null); + } + + /** Represents a GetIamPolicyRequest. */ + class GetIamPolicyRequest implements IGetIamPolicyRequest { + + /** + * Constructs a new GetIamPolicyRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.IGetIamPolicyRequest); + + /** GetIamPolicyRequest resource. */ + public resource: string; + + /** GetIamPolicyRequest options. */ + public options?: (google.iam.v1.IGetPolicyOptions|null); + + /** + * Creates a new GetIamPolicyRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns GetIamPolicyRequest instance + */ + public static create(properties?: google.iam.v1.IGetIamPolicyRequest): google.iam.v1.GetIamPolicyRequest; + + /** + * Encodes the specified GetIamPolicyRequest message. Does not implicitly {@link google.iam.v1.GetIamPolicyRequest.verify|verify} messages. + * @param message GetIamPolicyRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.IGetIamPolicyRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GetIamPolicyRequest message, length delimited. Does not implicitly {@link google.iam.v1.GetIamPolicyRequest.verify|verify} messages. 
+ * @param message GetIamPolicyRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.IGetIamPolicyRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetIamPolicyRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetIamPolicyRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.GetIamPolicyRequest; + + /** + * Decodes a GetIamPolicyRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GetIamPolicyRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.GetIamPolicyRequest; + + /** + * Verifies a GetIamPolicyRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GetIamPolicyRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GetIamPolicyRequest + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.GetIamPolicyRequest; + + /** + * Creates a plain object from a GetIamPolicyRequest message. Also converts values to other types if specified. + * @param message GetIamPolicyRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.GetIamPolicyRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GetIamPolicyRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a TestIamPermissionsRequest. */ + interface ITestIamPermissionsRequest { + + /** TestIamPermissionsRequest resource */ + resource?: (string|null); + + /** TestIamPermissionsRequest permissions */ + permissions?: (string[]|null); + } + + /** Represents a TestIamPermissionsRequest. */ + class TestIamPermissionsRequest implements ITestIamPermissionsRequest { + + /** + * Constructs a new TestIamPermissionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.ITestIamPermissionsRequest); + + /** TestIamPermissionsRequest resource. */ + public resource: string; + + /** TestIamPermissionsRequest permissions. */ + public permissions: string[]; + + /** + * Creates a new TestIamPermissionsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns TestIamPermissionsRequest instance + */ + public static create(properties?: google.iam.v1.ITestIamPermissionsRequest): google.iam.v1.TestIamPermissionsRequest; + + /** + * Encodes the specified TestIamPermissionsRequest message. Does not implicitly {@link google.iam.v1.TestIamPermissionsRequest.verify|verify} messages. 
+ * @param message TestIamPermissionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.ITestIamPermissionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TestIamPermissionsRequest message, length delimited. Does not implicitly {@link google.iam.v1.TestIamPermissionsRequest.verify|verify} messages. + * @param message TestIamPermissionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.ITestIamPermissionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TestIamPermissionsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TestIamPermissionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.TestIamPermissionsRequest; + + /** + * Decodes a TestIamPermissionsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TestIamPermissionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.TestIamPermissionsRequest; + + /** + * Verifies a TestIamPermissionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TestIamPermissionsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TestIamPermissionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.TestIamPermissionsRequest; + + /** + * Creates a plain object from a TestIamPermissionsRequest message. Also converts values to other types if specified. + * @param message TestIamPermissionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.TestIamPermissionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TestIamPermissionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a TestIamPermissionsResponse. */ + interface ITestIamPermissionsResponse { + + /** TestIamPermissionsResponse permissions */ + permissions?: (string[]|null); + } + + /** Represents a TestIamPermissionsResponse. */ + class TestIamPermissionsResponse implements ITestIamPermissionsResponse { + + /** + * Constructs a new TestIamPermissionsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.ITestIamPermissionsResponse); + + /** TestIamPermissionsResponse permissions. */ + public permissions: string[]; + + /** + * Creates a new TestIamPermissionsResponse instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns TestIamPermissionsResponse instance + */ + public static create(properties?: google.iam.v1.ITestIamPermissionsResponse): google.iam.v1.TestIamPermissionsResponse; + + /** + * Encodes the specified TestIamPermissionsResponse message. Does not implicitly {@link google.iam.v1.TestIamPermissionsResponse.verify|verify} messages. + * @param message TestIamPermissionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.ITestIamPermissionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TestIamPermissionsResponse message, length delimited. Does not implicitly {@link google.iam.v1.TestIamPermissionsResponse.verify|verify} messages. + * @param message TestIamPermissionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.ITestIamPermissionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TestIamPermissionsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TestIamPermissionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.TestIamPermissionsResponse; + + /** + * Decodes a TestIamPermissionsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TestIamPermissionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.TestIamPermissionsResponse; + + /** + * Verifies a TestIamPermissionsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TestIamPermissionsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TestIamPermissionsResponse + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.TestIamPermissionsResponse; + + /** + * Creates a plain object from a TestIamPermissionsResponse message. Also converts values to other types if specified. + * @param message TestIamPermissionsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.TestIamPermissionsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TestIamPermissionsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a GetPolicyOptions. */ + interface IGetPolicyOptions { + + /** GetPolicyOptions requestedPolicyVersion */ + requestedPolicyVersion?: (number|null); + } + + /** Represents a GetPolicyOptions. */ + class GetPolicyOptions implements IGetPolicyOptions { + + /** + * Constructs a new GetPolicyOptions. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.IGetPolicyOptions); + + /** GetPolicyOptions requestedPolicyVersion. */ + public requestedPolicyVersion: number; + + /** + * Creates a new GetPolicyOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns GetPolicyOptions instance + */ + public static create(properties?: google.iam.v1.IGetPolicyOptions): google.iam.v1.GetPolicyOptions; + + /** + * Encodes the specified GetPolicyOptions message. Does not implicitly {@link google.iam.v1.GetPolicyOptions.verify|verify} messages. + * @param message GetPolicyOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.IGetPolicyOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GetPolicyOptions message, length delimited. Does not implicitly {@link google.iam.v1.GetPolicyOptions.verify|verify} messages. + * @param message GetPolicyOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.IGetPolicyOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetPolicyOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetPolicyOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.GetPolicyOptions; + + /** + * Decodes a GetPolicyOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GetPolicyOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.GetPolicyOptions; + + /** + * Verifies a GetPolicyOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GetPolicyOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GetPolicyOptions + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.GetPolicyOptions; + + /** + * Creates a plain object from a GetPolicyOptions message. Also converts values to other types if specified. + * @param message GetPolicyOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.GetPolicyOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GetPolicyOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a Policy. */ + interface IPolicy { + + /** Policy version */ + version?: (number|null); + + /** Policy bindings */ + bindings?: (google.iam.v1.IBinding[]|null); + + /** Policy etag */ + etag?: (Uint8Array|null); + } + + /** Represents a Policy. */ + class Policy implements IPolicy { + + /** + * Constructs a new Policy. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.IPolicy); + + /** Policy version. */ + public version: number; + + /** Policy bindings. */ + public bindings: google.iam.v1.IBinding[]; + + /** Policy etag. */ + public etag: Uint8Array; + + /** + * Creates a new Policy instance using the specified properties. + * @param [properties] Properties to set + * @returns Policy instance + */ + public static create(properties?: google.iam.v1.IPolicy): google.iam.v1.Policy; + + /** + * Encodes the specified Policy message. Does not implicitly {@link google.iam.v1.Policy.verify|verify} messages. + * @param message Policy message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.IPolicy, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Policy message, length delimited. Does not implicitly {@link google.iam.v1.Policy.verify|verify} messages. + * @param message Policy message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.IPolicy, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Policy message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Policy + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.Policy; + + /** + * Decodes a Policy message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Policy + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.Policy; + + /** + * Verifies a Policy message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Policy message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Policy + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.Policy; + + /** + * Creates a plain object from a Policy message. Also converts values to other types if specified. + * @param message Policy + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.Policy, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Policy to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a Binding. */ + interface IBinding { + + /** Binding role */ + role?: (string|null); + + /** Binding members */ + members?: (string[]|null); + + /** Binding condition */ + condition?: (google.type.IExpr|null); + } + + /** Represents a Binding. */ + class Binding implements IBinding { + + /** + * Constructs a new Binding. + * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.IBinding); + + /** Binding role. */ + public role: string; + + /** Binding members. 
*/ + public members: string[]; + + /** Binding condition. */ + public condition?: (google.type.IExpr|null); + + /** + * Creates a new Binding instance using the specified properties. + * @param [properties] Properties to set + * @returns Binding instance + */ + public static create(properties?: google.iam.v1.IBinding): google.iam.v1.Binding; + + /** + * Encodes the specified Binding message. Does not implicitly {@link google.iam.v1.Binding.verify|verify} messages. + * @param message Binding message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.IBinding, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Binding message, length delimited. Does not implicitly {@link google.iam.v1.Binding.verify|verify} messages. + * @param message Binding message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.IBinding, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Binding message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Binding + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.Binding; + + /** + * Decodes a Binding message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Binding + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.Binding; + + /** + * Verifies a Binding message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Binding message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Binding + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.Binding; + + /** + * Creates a plain object from a Binding message. Also converts values to other types if specified. + * @param message Binding + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.Binding, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Binding to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a PolicyDelta. */ + interface IPolicyDelta { + + /** PolicyDelta bindingDeltas */ + bindingDeltas?: (google.iam.v1.IBindingDelta[]|null); + + /** PolicyDelta auditConfigDeltas */ + auditConfigDeltas?: (google.iam.v1.IAuditConfigDelta[]|null); + } + + /** Represents a PolicyDelta. */ + class PolicyDelta implements IPolicyDelta { + + /** + * Constructs a new PolicyDelta. + * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.IPolicyDelta); + + /** PolicyDelta bindingDeltas. */ + public bindingDeltas: google.iam.v1.IBindingDelta[]; + + /** PolicyDelta auditConfigDeltas. 
*/ + public auditConfigDeltas: google.iam.v1.IAuditConfigDelta[]; + + /** + * Creates a new PolicyDelta instance using the specified properties. + * @param [properties] Properties to set + * @returns PolicyDelta instance + */ + public static create(properties?: google.iam.v1.IPolicyDelta): google.iam.v1.PolicyDelta; + + /** + * Encodes the specified PolicyDelta message. Does not implicitly {@link google.iam.v1.PolicyDelta.verify|verify} messages. + * @param message PolicyDelta message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.IPolicyDelta, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified PolicyDelta message, length delimited. Does not implicitly {@link google.iam.v1.PolicyDelta.verify|verify} messages. + * @param message PolicyDelta message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.IPolicyDelta, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a PolicyDelta message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns PolicyDelta + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.PolicyDelta; + + /** + * Decodes a PolicyDelta message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns PolicyDelta + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.PolicyDelta; + + /** + * Verifies a PolicyDelta message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a PolicyDelta message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns PolicyDelta + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.PolicyDelta; + + /** + * Creates a plain object from a PolicyDelta message. Also converts values to other types if specified. + * @param message PolicyDelta + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.PolicyDelta, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this PolicyDelta to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a BindingDelta. */ + interface IBindingDelta { + + /** BindingDelta action */ + action?: (google.iam.v1.BindingDelta.Action|null); + + /** BindingDelta role */ + role?: (string|null); + + /** BindingDelta member */ + member?: (string|null); + + /** BindingDelta condition */ + condition?: (google.type.IExpr|null); + } + + /** Represents a BindingDelta. */ + class BindingDelta implements IBindingDelta { + + /** + * Constructs a new BindingDelta. + * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.IBindingDelta); + + /** BindingDelta action. 
*/ + public action: google.iam.v1.BindingDelta.Action; + + /** BindingDelta role. */ + public role: string; + + /** BindingDelta member. */ + public member: string; + + /** BindingDelta condition. */ + public condition?: (google.type.IExpr|null); + + /** + * Creates a new BindingDelta instance using the specified properties. + * @param [properties] Properties to set + * @returns BindingDelta instance + */ + public static create(properties?: google.iam.v1.IBindingDelta): google.iam.v1.BindingDelta; + + /** + * Encodes the specified BindingDelta message. Does not implicitly {@link google.iam.v1.BindingDelta.verify|verify} messages. + * @param message BindingDelta message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.IBindingDelta, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BindingDelta message, length delimited. Does not implicitly {@link google.iam.v1.BindingDelta.verify|verify} messages. + * @param message BindingDelta message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.IBindingDelta, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BindingDelta message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BindingDelta + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.BindingDelta; + + /** + * Decodes a BindingDelta message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BindingDelta + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.BindingDelta; + + /** + * Verifies a BindingDelta message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BindingDelta message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BindingDelta + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.BindingDelta; + + /** + * Creates a plain object from a BindingDelta message. Also converts values to other types if specified. + * @param message BindingDelta + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.BindingDelta, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BindingDelta to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace BindingDelta { + + /** Action enum. */ + enum Action { + ACTION_UNSPECIFIED = 0, + ADD = 1, + REMOVE = 2 + } + } + + /** Properties of an AuditConfigDelta. 
*/ + interface IAuditConfigDelta { + + /** AuditConfigDelta action */ + action?: (google.iam.v1.AuditConfigDelta.Action|null); + + /** AuditConfigDelta service */ + service?: (string|null); + + /** AuditConfigDelta exemptedMember */ + exemptedMember?: (string|null); + + /** AuditConfigDelta logType */ + logType?: (string|null); + } + + /** Represents an AuditConfigDelta. */ + class AuditConfigDelta implements IAuditConfigDelta { + + /** + * Constructs a new AuditConfigDelta. + * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.IAuditConfigDelta); + + /** AuditConfigDelta action. */ + public action: google.iam.v1.AuditConfigDelta.Action; + + /** AuditConfigDelta service. */ + public service: string; + + /** AuditConfigDelta exemptedMember. */ + public exemptedMember: string; + + /** AuditConfigDelta logType. */ + public logType: string; + + /** + * Creates a new AuditConfigDelta instance using the specified properties. + * @param [properties] Properties to set + * @returns AuditConfigDelta instance + */ + public static create(properties?: google.iam.v1.IAuditConfigDelta): google.iam.v1.AuditConfigDelta; + + /** + * Encodes the specified AuditConfigDelta message. Does not implicitly {@link google.iam.v1.AuditConfigDelta.verify|verify} messages. + * @param message AuditConfigDelta message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.IAuditConfigDelta, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AuditConfigDelta message, length delimited. Does not implicitly {@link google.iam.v1.AuditConfigDelta.verify|verify} messages. + * @param message AuditConfigDelta message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.IAuditConfigDelta, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AuditConfigDelta message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AuditConfigDelta + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.AuditConfigDelta; + + /** + * Decodes an AuditConfigDelta message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AuditConfigDelta + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.AuditConfigDelta; + + /** + * Verifies an AuditConfigDelta message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AuditConfigDelta message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AuditConfigDelta + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.AuditConfigDelta; + + /** + * Creates a plain object from an AuditConfigDelta message. Also converts values to other types if specified. 
+ * @param message AuditConfigDelta + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.AuditConfigDelta, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AuditConfigDelta to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace AuditConfigDelta { + + /** Action enum. */ + enum Action { + ACTION_UNSPECIFIED = 0, + ADD = 1, + REMOVE = 2 + } + } + + /** Namespace logging. */ + namespace logging { + + /** Properties of an AuditData. */ + interface IAuditData { + + /** AuditData policyDelta */ + policyDelta?: (google.iam.v1.IPolicyDelta|null); + } + + /** Represents an AuditData. */ + class AuditData implements IAuditData { + + /** + * Constructs a new AuditData. + * @param [properties] Properties to set + */ + constructor(properties?: google.iam.v1.logging.IAuditData); + + /** AuditData policyDelta. */ + public policyDelta?: (google.iam.v1.IPolicyDelta|null); + + /** + * Creates a new AuditData instance using the specified properties. + * @param [properties] Properties to set + * @returns AuditData instance + */ + public static create(properties?: google.iam.v1.logging.IAuditData): google.iam.v1.logging.AuditData; + + /** + * Encodes the specified AuditData message. Does not implicitly {@link google.iam.v1.logging.AuditData.verify|verify} messages. + * @param message AuditData message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.iam.v1.logging.IAuditData, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AuditData message, length delimited. Does not implicitly {@link google.iam.v1.logging.AuditData.verify|verify} messages. + * @param message AuditData message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.iam.v1.logging.IAuditData, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AuditData message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AuditData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.iam.v1.logging.AuditData; + + /** + * Decodes an AuditData message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AuditData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.iam.v1.logging.AuditData; + + /** + * Verifies an AuditData message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AuditData message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AuditData + */ + public static fromObject(object: { [k: string]: any }): google.iam.v1.logging.AuditData; + + /** + * Creates a plain object from an AuditData message. Also converts values to other types if specified. 
+ * @param message AuditData + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.iam.v1.logging.AuditData, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AuditData to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + } + } + + /** Namespace api. */ + namespace api { + + /** Properties of a Http. */ + interface IHttp { + + /** Http rules */ + rules?: (google.api.IHttpRule[]|null); + + /** Http fullyDecodeReservedExpansion */ + fullyDecodeReservedExpansion?: (boolean|null); + } + + /** Represents a Http. */ + class Http implements IHttp { + + /** + * Constructs a new Http. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttp); + + /** Http rules. */ + public rules: google.api.IHttpRule[]; + + /** Http fullyDecodeReservedExpansion. */ + public fullyDecodeReservedExpansion: boolean; + + /** + * Creates a new Http instance using the specified properties. + * @param [properties] Properties to set + * @returns Http instance + */ + public static create(properties?: google.api.IHttp): google.api.Http; + + /** + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Http message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Http; + + /** + * Decodes a Http message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Http; + + /** + * Verifies a Http message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Http message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Http + */ + public static fromObject(object: { [k: string]: any }): google.api.Http; + + /** + * Creates a plain object from a Http message. Also converts values to other types if specified. + * @param message Http + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.Http, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Http to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a HttpRule. */ + interface IHttpRule { + + /** HttpRule selector */ + selector?: (string|null); + + /** HttpRule get */ + get?: (string|null); + + /** HttpRule put */ + put?: (string|null); + + /** HttpRule post */ + post?: (string|null); + + /** HttpRule delete */ + "delete"?: (string|null); + + /** HttpRule patch */ + patch?: (string|null); + + /** HttpRule custom */ + custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body */ + body?: (string|null); + + /** HttpRule responseBody */ + responseBody?: (string|null); + + /** HttpRule additionalBindings */ + additionalBindings?: (google.api.IHttpRule[]|null); + } + + /** Represents a HttpRule. */ + class HttpRule implements IHttpRule { + + /** + * Constructs a new HttpRule. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttpRule); + + /** HttpRule selector. */ + public selector: string; + + /** HttpRule get. */ + public get: string; + + /** HttpRule put. */ + public put: string; + + /** HttpRule post. */ + public post: string; + + /** HttpRule delete. */ + public delete: string; + + /** HttpRule patch. */ + public patch: string; + + /** HttpRule custom. */ + public custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body. */ + public body: string; + + /** HttpRule responseBody. */ + public responseBody: string; + + /** HttpRule additionalBindings. */ + public additionalBindings: google.api.IHttpRule[]; + + /** HttpRule pattern. */ + public pattern?: ("get"|"put"|"post"|"delete"|"patch"|"custom"); + + /** + * Creates a new HttpRule instance using the specified properties. + * @param [properties] Properties to set + * @returns HttpRule instance + */ + public static create(properties?: google.api.IHttpRule): google.api.HttpRule; + + /** + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a HttpRule message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.HttpRule; + + /** + * Decodes a HttpRule message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.HttpRule; + + /** + * Verifies a HttpRule message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns HttpRule + */ + public static fromObject(object: { [k: string]: any }): google.api.HttpRule; + + /** + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @param message HttpRule + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.HttpRule, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this HttpRule to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a CustomHttpPattern. */ + interface ICustomHttpPattern { + + /** CustomHttpPattern kind */ + kind?: (string|null); + + /** CustomHttpPattern path */ + path?: (string|null); + } + + /** Represents a CustomHttpPattern. */ + class CustomHttpPattern implements ICustomHttpPattern { + + /** + * Constructs a new CustomHttpPattern. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ICustomHttpPattern); + + /** CustomHttpPattern kind. */ + public kind: string; + + /** CustomHttpPattern path. */ + public path: string; + + /** + * Creates a new CustomHttpPattern instance using the specified properties. + * @param [properties] Properties to set + * @returns CustomHttpPattern instance + */ + public static create(properties?: google.api.ICustomHttpPattern): google.api.CustomHttpPattern; + + /** + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CustomHttpPattern; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CustomHttpPattern; + + /** + * Verifies a CustomHttpPattern message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CustomHttpPattern + */ + public static fromObject(object: { [k: string]: any }): google.api.CustomHttpPattern; + + /** + * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * @param message CustomHttpPattern + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.CustomHttpPattern, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CustomHttpPattern to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** FieldBehavior enum. */ + enum FieldBehavior { + FIELD_BEHAVIOR_UNSPECIFIED = 0, + OPTIONAL = 1, + REQUIRED = 2, + OUTPUT_ONLY = 3, + INPUT_ONLY = 4, + IMMUTABLE = 5 + } + + /** Properties of a ResourceDescriptor. */ + interface IResourceDescriptor { + + /** ResourceDescriptor type */ + type?: (string|null); + + /** ResourceDescriptor pattern */ + pattern?: (string[]|null); + + /** ResourceDescriptor nameField */ + nameField?: (string|null); + + /** ResourceDescriptor history */ + history?: (google.api.ResourceDescriptor.History|null); + + /** ResourceDescriptor plural */ + plural?: (string|null); + + /** ResourceDescriptor singular */ + singular?: (string|null); + } + + /** Represents a ResourceDescriptor. */ + class ResourceDescriptor implements IResourceDescriptor { + + /** + * Constructs a new ResourceDescriptor. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IResourceDescriptor); + + /** ResourceDescriptor type. */ + public type: string; + + /** ResourceDescriptor pattern. */ + public pattern: string[]; + + /** ResourceDescriptor nameField. */ + public nameField: string; + + /** ResourceDescriptor history. */ + public history: google.api.ResourceDescriptor.History; + + /** ResourceDescriptor plural. */ + public plural: string; + + /** ResourceDescriptor singular. */ + public singular: string; + + /** + * Creates a new ResourceDescriptor instance using the specified properties. + * @param [properties] Properties to set + * @returns ResourceDescriptor instance + */ + public static create(properties?: google.api.IResourceDescriptor): google.api.ResourceDescriptor; + + /** + * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @param message ResourceDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @param message ResourceDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceDescriptor; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceDescriptor; + + /** + * Verifies a ResourceDescriptor message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ResourceDescriptor + */ + public static fromObject(object: { [k: string]: any }): google.api.ResourceDescriptor; + + /** + * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. + * @param message ResourceDescriptor + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.ResourceDescriptor, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ResourceDescriptor to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace ResourceDescriptor { + + /** History enum. */ + enum History { + HISTORY_UNSPECIFIED = 0, + ORIGINALLY_SINGLE_PATTERN = 1, + FUTURE_MULTI_PATTERN = 2 + } + } + + /** Properties of a ResourceReference. */ + interface IResourceReference { + + /** ResourceReference type */ + type?: (string|null); + + /** ResourceReference childType */ + childType?: (string|null); + } + + /** Represents a ResourceReference. */ + class ResourceReference implements IResourceReference { + + /** + * Constructs a new ResourceReference. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IResourceReference); + + /** ResourceReference type. */ + public type: string; + + /** ResourceReference childType. */ + public childType: string; + + /** + * Creates a new ResourceReference instance using the specified properties. + * @param [properties] Properties to set + * @returns ResourceReference instance + */ + public static create(properties?: google.api.IResourceReference): google.api.ResourceReference; + + /** + * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * @param message ResourceReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ResourceReference message, length delimited. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. 
+ * @param message ResourceReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ResourceReference message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceReference; + + /** + * Decodes a ResourceReference message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceReference; + + /** + * Verifies a ResourceReference message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ResourceReference + */ + public static fromObject(object: { [k: string]: any }): google.api.ResourceReference; + + /** + * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. + * @param message ResourceReference + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.ResourceReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ResourceReference to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Namespace protobuf. */ + namespace protobuf { + + /** Properties of a FileDescriptorSet. */ + interface IFileDescriptorSet { + + /** FileDescriptorSet file */ + file?: (google.protobuf.IFileDescriptorProto[]|null); + } + + /** Represents a FileDescriptorSet. */ + class FileDescriptorSet implements IFileDescriptorSet { + + /** + * Constructs a new FileDescriptorSet. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorSet); + + /** FileDescriptorSet file. */ + public file: google.protobuf.IFileDescriptorProto[]; + + /** + * Creates a new FileDescriptorSet instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorSet instance + */ + public static create(properties?: google.protobuf.IFileDescriptorSet): google.protobuf.FileDescriptorSet; + + /** + * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorSet message, length delimited. 
Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorSet; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorSet; + + /** + * Verifies a FileDescriptorSet message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorSet + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorSet; + + /** + * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. + * @param message FileDescriptorSet + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorSet, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorSet to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileDescriptorProto. */ + interface IFileDescriptorProto { + + /** FileDescriptorProto name */ + name?: (string|null); + + /** FileDescriptorProto package */ + "package"?: (string|null); + + /** FileDescriptorProto dependency */ + dependency?: (string[]|null); + + /** FileDescriptorProto publicDependency */ + publicDependency?: (number[]|null); + + /** FileDescriptorProto weakDependency */ + weakDependency?: (number[]|null); + + /** FileDescriptorProto messageType */ + messageType?: (google.protobuf.IDescriptorProto[]|null); + + /** FileDescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** FileDescriptorProto service */ + service?: (google.protobuf.IServiceDescriptorProto[]|null); + + /** FileDescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** FileDescriptorProto options */ + options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo */ + sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax */ + syntax?: (string|null); + } + + /** Represents a FileDescriptorProto. */ + class FileDescriptorProto implements IFileDescriptorProto { + + /** + * Constructs a new FileDescriptorProto. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorProto); + + /** FileDescriptorProto name. */ + public name: string; + + /** FileDescriptorProto package. */ + public package: string; + + /** FileDescriptorProto dependency. */ + public dependency: string[]; + + /** FileDescriptorProto publicDependency. */ + public publicDependency: number[]; + + /** FileDescriptorProto weakDependency. */ + public weakDependency: number[]; + + /** FileDescriptorProto messageType. */ + public messageType: google.protobuf.IDescriptorProto[]; + + /** FileDescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** FileDescriptorProto service. */ + public service: google.protobuf.IServiceDescriptorProto[]; + + /** FileDescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** FileDescriptorProto options. */ + public options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo. */ + public sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax. */ + public syntax: string; + + /** + * Creates a new FileDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFileDescriptorProto): google.protobuf.FileDescriptorProto; + + /** + * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorProto; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorProto; + + /** + * Verifies a FileDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorProto message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorProto; + + /** + * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. + * @param message FileDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a DescriptorProto. */ + interface IDescriptorProto { + + /** DescriptorProto name */ + name?: (string|null); + + /** DescriptorProto field */ + field?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto nestedType */ + nestedType?: (google.protobuf.IDescriptorProto[]|null); + + /** DescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** DescriptorProto extensionRange */ + extensionRange?: (google.protobuf.DescriptorProto.IExtensionRange[]|null); + + /** DescriptorProto oneofDecl */ + oneofDecl?: (google.protobuf.IOneofDescriptorProto[]|null); + + /** DescriptorProto options */ + options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange */ + reservedRange?: (google.protobuf.DescriptorProto.IReservedRange[]|null); + + /** DescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents a DescriptorProto. */ + class DescriptorProto implements IDescriptorProto { + + /** + * Constructs a new DescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IDescriptorProto); + + /** DescriptorProto name. */ + public name: string; + + /** DescriptorProto field. */ + public field: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto nestedType. */ + public nestedType: google.protobuf.IDescriptorProto[]; + + /** DescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** DescriptorProto extensionRange. */ + public extensionRange: google.protobuf.DescriptorProto.IExtensionRange[]; + + /** DescriptorProto oneofDecl. */ + public oneofDecl: google.protobuf.IOneofDescriptorProto[]; + + /** DescriptorProto options. */ + public options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange. */ + public reservedRange: google.protobuf.DescriptorProto.IReservedRange[]; + + /** DescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new DescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns DescriptorProto instance + */ + public static create(properties?: google.protobuf.IDescriptorProto): google.protobuf.DescriptorProto; + + /** + * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. 
+ * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto; + + /** + * Verifies a DescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto; + + /** + * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. + * @param message DescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace DescriptorProto { + + /** Properties of an ExtensionRange. */ + interface IExtensionRange { + + /** ExtensionRange start */ + start?: (number|null); + + /** ExtensionRange end */ + end?: (number|null); + + /** ExtensionRange options */ + options?: (google.protobuf.IExtensionRangeOptions|null); + } + + /** Represents an ExtensionRange. */ + class ExtensionRange implements IExtensionRange { + + /** + * Constructs a new ExtensionRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IExtensionRange); + + /** ExtensionRange start. */ + public start: number; + + /** ExtensionRange end. */ + public end: number; + + /** ExtensionRange options. */ + public options?: (google.protobuf.IExtensionRangeOptions|null); + + /** + * Creates a new ExtensionRange instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns ExtensionRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IExtensionRange): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Verifies an ExtensionRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. + * @param message ExtensionRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ExtensionRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ReservedRange. */ + interface IReservedRange { + + /** ReservedRange start */ + start?: (number|null); + + /** ReservedRange end */ + end?: (number|null); + } + + /** Represents a ReservedRange. */ + class ReservedRange implements IReservedRange { + + /** + * Constructs a new ReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IReservedRange); + + /** ReservedRange start. */ + public start: number; + + /** ReservedRange end. 
*/ + public end: number; + + /** + * Creates a new ReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns ReservedRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IReservedRange): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Decodes a ReservedRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Verifies a ReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. + * @param message ReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an ExtensionRangeOptions. */ + interface IExtensionRangeOptions { + + /** ExtensionRangeOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an ExtensionRangeOptions. */ + class ExtensionRangeOptions implements IExtensionRangeOptions { + + /** + * Constructs a new ExtensionRangeOptions. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IExtensionRangeOptions); + + /** ExtensionRangeOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ExtensionRangeOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRangeOptions instance + */ + public static create(properties?: google.protobuf.IExtensionRangeOptions): google.protobuf.ExtensionRangeOptions; + + /** + * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ExtensionRangeOptions; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ExtensionRangeOptions; + + /** + * Verifies an ExtensionRangeOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRangeOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ExtensionRangeOptions; + + /** + * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. + * @param message ExtensionRangeOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ExtensionRangeOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRangeOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FieldDescriptorProto. 
*/ + interface IFieldDescriptorProto { + + /** FieldDescriptorProto name */ + name?: (string|null); + + /** FieldDescriptorProto number */ + number?: (number|null); + + /** FieldDescriptorProto label */ + label?: (google.protobuf.FieldDescriptorProto.Label|null); + + /** FieldDescriptorProto type */ + type?: (google.protobuf.FieldDescriptorProto.Type|null); + + /** FieldDescriptorProto typeName */ + typeName?: (string|null); + + /** FieldDescriptorProto extendee */ + extendee?: (string|null); + + /** FieldDescriptorProto defaultValue */ + defaultValue?: (string|null); + + /** FieldDescriptorProto oneofIndex */ + oneofIndex?: (number|null); + + /** FieldDescriptorProto jsonName */ + jsonName?: (string|null); + + /** FieldDescriptorProto options */ + options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional */ + proto3Optional?: (boolean|null); + } + + /** Represents a FieldDescriptorProto. */ + class FieldDescriptorProto implements IFieldDescriptorProto { + + /** + * Constructs a new FieldDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldDescriptorProto); + + /** FieldDescriptorProto name. */ + public name: string; + + /** FieldDescriptorProto number. */ + public number: number; + + /** FieldDescriptorProto label. */ + public label: google.protobuf.FieldDescriptorProto.Label; + + /** FieldDescriptorProto type. */ + public type: google.protobuf.FieldDescriptorProto.Type; + + /** FieldDescriptorProto typeName. */ + public typeName: string; + + /** FieldDescriptorProto extendee. */ + public extendee: string; + + /** FieldDescriptorProto defaultValue. */ + public defaultValue: string; + + /** FieldDescriptorProto oneofIndex. */ + public oneofIndex: number; + + /** FieldDescriptorProto jsonName. */ + public jsonName: string; + + /** FieldDescriptorProto options. */ + public options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional. */ + public proto3Optional: boolean; + + /** + * Creates a new FieldDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFieldDescriptorProto): google.protobuf.FieldDescriptorProto; + + /** + * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldDescriptorProto; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldDescriptorProto; + + /** + * Verifies a FieldDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldDescriptorProto; + + /** + * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. + * @param message FieldDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FieldDescriptorProto { + + /** Type enum. */ + enum Type { + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + TYPE_SINT32 = 17, + TYPE_SINT64 = 18 + } + + /** Label enum. */ + enum Label { + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3 + } + } + + /** Properties of an OneofDescriptorProto. */ + interface IOneofDescriptorProto { + + /** OneofDescriptorProto name */ + name?: (string|null); + + /** OneofDescriptorProto options */ + options?: (google.protobuf.IOneofOptions|null); + } + + /** Represents an OneofDescriptorProto. */ + class OneofDescriptorProto implements IOneofDescriptorProto { + + /** + * Constructs a new OneofDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofDescriptorProto); + + /** OneofDescriptorProto name. */ + public name: string; + + /** OneofDescriptorProto options. */ + public options?: (google.protobuf.IOneofOptions|null); + + /** + * Creates a new OneofDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofDescriptorProto instance + */ + public static create(properties?: google.protobuf.IOneofDescriptorProto): google.protobuf.OneofDescriptorProto; + + /** + * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. 
+ * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofDescriptorProto; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofDescriptorProto; + + /** + * Verifies an OneofDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OneofDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofDescriptorProto; + + /** + * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. + * @param message OneofDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumDescriptorProto. */ + interface IEnumDescriptorProto { + + /** EnumDescriptorProto name */ + name?: (string|null); + + /** EnumDescriptorProto value */ + value?: (google.protobuf.IEnumValueDescriptorProto[]|null); + + /** EnumDescriptorProto options */ + options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange */ + reservedRange?: (google.protobuf.EnumDescriptorProto.IEnumReservedRange[]|null); + + /** EnumDescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents an EnumDescriptorProto. */ + class EnumDescriptorProto implements IEnumDescriptorProto { + + /** + * Constructs a new EnumDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumDescriptorProto); + + /** EnumDescriptorProto name. 
*/ + public name: string; + + /** EnumDescriptorProto value. */ + public value: google.protobuf.IEnumValueDescriptorProto[]; + + /** EnumDescriptorProto options. */ + public options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange. */ + public reservedRange: google.protobuf.EnumDescriptorProto.IEnumReservedRange[]; + + /** EnumDescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new EnumDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumDescriptorProto): google.protobuf.EnumDescriptorProto; + + /** + * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto; + + /** + * Verifies an EnumDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto; + + /** + * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. + * @param message EnumDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumDescriptorProto to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace EnumDescriptorProto { + + /** Properties of an EnumReservedRange. */ + interface IEnumReservedRange { + + /** EnumReservedRange start */ + start?: (number|null); + + /** EnumReservedRange end */ + end?: (number|null); + } + + /** Represents an EnumReservedRange. */ + class EnumReservedRange implements IEnumReservedRange { + + /** + * Constructs a new EnumReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange); + + /** EnumReservedRange start. */ + public start: number; + + /** EnumReservedRange end. */ + public end: number; + + /** + * Creates a new EnumReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumReservedRange instance + */ + public static create(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Verifies an EnumReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. 
+ * @param message EnumReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto.EnumReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an EnumValueDescriptorProto. */ + interface IEnumValueDescriptorProto { + + /** EnumValueDescriptorProto name */ + name?: (string|null); + + /** EnumValueDescriptorProto number */ + number?: (number|null); + + /** EnumValueDescriptorProto options */ + options?: (google.protobuf.IEnumValueOptions|null); + } + + /** Represents an EnumValueDescriptorProto. */ + class EnumValueDescriptorProto implements IEnumValueDescriptorProto { + + /** + * Constructs a new EnumValueDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueDescriptorProto); + + /** EnumValueDescriptorProto name. */ + public name: string; + + /** EnumValueDescriptorProto number. */ + public number: number; + + /** EnumValueDescriptorProto options. */ + public options?: (google.protobuf.IEnumValueOptions|null); + + /** + * Creates a new EnumValueDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumValueDescriptorProto): google.protobuf.EnumValueDescriptorProto; + + /** + * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueDescriptorProto; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueDescriptorProto; + + /** + * Verifies an EnumValueDescriptorProto message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumValueDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueDescriptorProto; + + /** + * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. + * @param message EnumValueDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ServiceDescriptorProto. */ + interface IServiceDescriptorProto { + + /** ServiceDescriptorProto name */ + name?: (string|null); + + /** ServiceDescriptorProto method */ + method?: (google.protobuf.IMethodDescriptorProto[]|null); + + /** ServiceDescriptorProto options */ + options?: (google.protobuf.IServiceOptions|null); + } + + /** Represents a ServiceDescriptorProto. */ + class ServiceDescriptorProto implements IServiceDescriptorProto { + + /** + * Constructs a new ServiceDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceDescriptorProto); + + /** ServiceDescriptorProto name. */ + public name: string; + + /** ServiceDescriptorProto method. */ + public method: google.protobuf.IMethodDescriptorProto[]; + + /** ServiceDescriptorProto options. */ + public options?: (google.protobuf.IServiceOptions|null); + + /** + * Creates a new ServiceDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns ServiceDescriptorProto instance + */ + public static create(properties?: google.protobuf.IServiceDescriptorProto): google.protobuf.ServiceDescriptorProto; + + /** + * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceDescriptorProto; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceDescriptorProto; + + /** + * Verifies a ServiceDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceDescriptorProto; + + /** + * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. + * @param message ServiceDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MethodDescriptorProto. */ + interface IMethodDescriptorProto { + + /** MethodDescriptorProto name */ + name?: (string|null); + + /** MethodDescriptorProto inputType */ + inputType?: (string|null); + + /** MethodDescriptorProto outputType */ + outputType?: (string|null); + + /** MethodDescriptorProto options */ + options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming */ + clientStreaming?: (boolean|null); + + /** MethodDescriptorProto serverStreaming */ + serverStreaming?: (boolean|null); + } + + /** Represents a MethodDescriptorProto. */ + class MethodDescriptorProto implements IMethodDescriptorProto { + + /** + * Constructs a new MethodDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodDescriptorProto); + + /** MethodDescriptorProto name. */ + public name: string; + + /** MethodDescriptorProto inputType. */ + public inputType: string; + + /** MethodDescriptorProto outputType. */ + public outputType: string; + + /** MethodDescriptorProto options. */ + public options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming. */ + public clientStreaming: boolean; + + /** MethodDescriptorProto serverStreaming. */ + public serverStreaming: boolean; + + /** + * Creates a new MethodDescriptorProto instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns MethodDescriptorProto instance + */ + public static create(properties?: google.protobuf.IMethodDescriptorProto): google.protobuf.MethodDescriptorProto; + + /** + * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodDescriptorProto; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodDescriptorProto; + + /** + * Verifies a MethodDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodDescriptorProto; + + /** + * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. + * @param message MethodDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileOptions. 
*/ + interface IFileOptions { + + /** FileOptions javaPackage */ + javaPackage?: (string|null); + + /** FileOptions javaOuterClassname */ + javaOuterClassname?: (string|null); + + /** FileOptions javaMultipleFiles */ + javaMultipleFiles?: (boolean|null); + + /** FileOptions javaGenerateEqualsAndHash */ + javaGenerateEqualsAndHash?: (boolean|null); + + /** FileOptions javaStringCheckUtf8 */ + javaStringCheckUtf8?: (boolean|null); + + /** FileOptions optimizeFor */ + optimizeFor?: (google.protobuf.FileOptions.OptimizeMode|null); + + /** FileOptions goPackage */ + goPackage?: (string|null); + + /** FileOptions ccGenericServices */ + ccGenericServices?: (boolean|null); + + /** FileOptions javaGenericServices */ + javaGenericServices?: (boolean|null); + + /** FileOptions pyGenericServices */ + pyGenericServices?: (boolean|null); + + /** FileOptions phpGenericServices */ + phpGenericServices?: (boolean|null); + + /** FileOptions deprecated */ + deprecated?: (boolean|null); + + /** FileOptions ccEnableArenas */ + ccEnableArenas?: (boolean|null); + + /** FileOptions objcClassPrefix */ + objcClassPrefix?: (string|null); + + /** FileOptions csharpNamespace */ + csharpNamespace?: (string|null); + + /** FileOptions swiftPrefix */ + swiftPrefix?: (string|null); + + /** FileOptions phpClassPrefix */ + phpClassPrefix?: (string|null); + + /** FileOptions phpNamespace */ + phpNamespace?: (string|null); + + /** FileOptions phpMetadataNamespace */ + phpMetadataNamespace?: (string|null); + + /** FileOptions rubyPackage */ + rubyPackage?: (string|null); + + /** FileOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** FileOptions .google.api.resourceDefinition */ + ".google.api.resourceDefinition"?: (google.api.IResourceDescriptor[]|null); + } + + /** Represents a FileOptions. */ + class FileOptions implements IFileOptions { + + /** + * Constructs a new FileOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileOptions); + + /** FileOptions javaPackage. */ + public javaPackage: string; + + /** FileOptions javaOuterClassname. */ + public javaOuterClassname: string; + + /** FileOptions javaMultipleFiles. */ + public javaMultipleFiles: boolean; + + /** FileOptions javaGenerateEqualsAndHash. */ + public javaGenerateEqualsAndHash: boolean; + + /** FileOptions javaStringCheckUtf8. */ + public javaStringCheckUtf8: boolean; + + /** FileOptions optimizeFor. */ + public optimizeFor: google.protobuf.FileOptions.OptimizeMode; + + /** FileOptions goPackage. */ + public goPackage: string; + + /** FileOptions ccGenericServices. */ + public ccGenericServices: boolean; + + /** FileOptions javaGenericServices. */ + public javaGenericServices: boolean; + + /** FileOptions pyGenericServices. */ + public pyGenericServices: boolean; + + /** FileOptions phpGenericServices. */ + public phpGenericServices: boolean; + + /** FileOptions deprecated. */ + public deprecated: boolean; + + /** FileOptions ccEnableArenas. */ + public ccEnableArenas: boolean; + + /** FileOptions objcClassPrefix. */ + public objcClassPrefix: string; + + /** FileOptions csharpNamespace. */ + public csharpNamespace: string; + + /** FileOptions swiftPrefix. */ + public swiftPrefix: string; + + /** FileOptions phpClassPrefix. */ + public phpClassPrefix: string; + + /** FileOptions phpNamespace. */ + public phpNamespace: string; + + /** FileOptions phpMetadataNamespace. */ + public phpMetadataNamespace: string; + + /** FileOptions rubyPackage. 
*/ + public rubyPackage: string; + + /** FileOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FileOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns FileOptions instance + */ + public static create(properties?: google.protobuf.IFileOptions): google.protobuf.FileOptions; + + /** + * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileOptions; + + /** + * Decodes a FileOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileOptions; + + /** + * Verifies a FileOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileOptions; + + /** + * Creates a plain object from a FileOptions message. Also converts values to other types if specified. + * @param message FileOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FileOptions { + + /** OptimizeMode enum. */ + enum OptimizeMode { + SPEED = 1, + CODE_SIZE = 2, + LITE_RUNTIME = 3 + } + } + + /** Properties of a MessageOptions. 
*/ + interface IMessageOptions { + + /** MessageOptions messageSetWireFormat */ + messageSetWireFormat?: (boolean|null); + + /** MessageOptions noStandardDescriptorAccessor */ + noStandardDescriptorAccessor?: (boolean|null); + + /** MessageOptions deprecated */ + deprecated?: (boolean|null); + + /** MessageOptions mapEntry */ + mapEntry?: (boolean|null); + + /** MessageOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** MessageOptions .google.api.resource */ + ".google.api.resource"?: (google.api.IResourceDescriptor|null); + } + + /** Represents a MessageOptions. */ + class MessageOptions implements IMessageOptions { + + /** + * Constructs a new MessageOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMessageOptions); + + /** MessageOptions messageSetWireFormat. */ + public messageSetWireFormat: boolean; + + /** MessageOptions noStandardDescriptorAccessor. */ + public noStandardDescriptorAccessor: boolean; + + /** MessageOptions deprecated. */ + public deprecated: boolean; + + /** MessageOptions mapEntry. */ + public mapEntry: boolean; + + /** MessageOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MessageOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns MessageOptions instance + */ + public static create(properties?: google.protobuf.IMessageOptions): google.protobuf.MessageOptions; + + /** + * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MessageOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MessageOptions; + + /** + * Decodes a MessageOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MessageOptions; + + /** + * Verifies a MessageOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MessageOptions message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns MessageOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MessageOptions; + + /** + * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. + * @param message MessageOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MessageOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MessageOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FieldOptions. */ + interface IFieldOptions { + + /** FieldOptions ctype */ + ctype?: (google.protobuf.FieldOptions.CType|null); + + /** FieldOptions packed */ + packed?: (boolean|null); + + /** FieldOptions jstype */ + jstype?: (google.protobuf.FieldOptions.JSType|null); + + /** FieldOptions lazy */ + lazy?: (boolean|null); + + /** FieldOptions deprecated */ + deprecated?: (boolean|null); + + /** FieldOptions weak */ + weak?: (boolean|null); + + /** FieldOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** FieldOptions .google.api.fieldBehavior */ + ".google.api.fieldBehavior"?: (google.api.FieldBehavior[]|null); + + /** FieldOptions .google.api.resourceReference */ + ".google.api.resourceReference"?: (google.api.IResourceReference|null); + } + + /** Represents a FieldOptions. */ + class FieldOptions implements IFieldOptions { + + /** + * Constructs a new FieldOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldOptions); + + /** FieldOptions ctype. */ + public ctype: google.protobuf.FieldOptions.CType; + + /** FieldOptions packed. */ + public packed: boolean; + + /** FieldOptions jstype. */ + public jstype: google.protobuf.FieldOptions.JSType; + + /** FieldOptions lazy. */ + public lazy: boolean; + + /** FieldOptions deprecated. */ + public deprecated: boolean; + + /** FieldOptions weak. */ + public weak: boolean; + + /** FieldOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FieldOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldOptions instance + */ + public static create(properties?: google.protobuf.IFieldOptions): google.protobuf.FieldOptions; + + /** + * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions; + + /** + * Decodes a FieldOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions; + + /** + * Verifies a FieldOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions; + + /** + * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. + * @param message FieldOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FieldOptions { + + /** CType enum. */ + enum CType { + STRING = 0, + CORD = 1, + STRING_PIECE = 2 + } + + /** JSType enum. */ + enum JSType { + JS_NORMAL = 0, + JS_STRING = 1, + JS_NUMBER = 2 + } + } + + /** Properties of an OneofOptions. */ + interface IOneofOptions { + + /** OneofOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an OneofOptions. */ + class OneofOptions implements IOneofOptions { + + /** + * Constructs a new OneofOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofOptions); + + /** OneofOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new OneofOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofOptions instance + */ + public static create(properties?: google.protobuf.IOneofOptions): google.protobuf.OneofOptions; + + /** + * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
+ * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofOptions; + + /** + * Decodes an OneofOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofOptions; + + /** + * Verifies an OneofOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OneofOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofOptions; + + /** + * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. + * @param message OneofOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumOptions. */ + interface IEnumOptions { + + /** EnumOptions allowAlias */ + allowAlias?: (boolean|null); + + /** EnumOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumOptions. */ + class EnumOptions implements IEnumOptions { + + /** + * Constructs a new EnumOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumOptions); + + /** EnumOptions allowAlias. */ + public allowAlias: boolean; + + /** EnumOptions deprecated. */ + public deprecated: boolean; + + /** EnumOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumOptions instance + */ + public static create(properties?: google.protobuf.IEnumOptions): google.protobuf.EnumOptions; + + /** + * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
+ * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumOptions; + + /** + * Decodes an EnumOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumOptions; + + /** + * Verifies an EnumOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumOptions; + + /** + * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. + * @param message EnumOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumValueOptions. */ + interface IEnumValueOptions { + + /** EnumValueOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumValueOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumValueOptions. */ + class EnumValueOptions implements IEnumValueOptions { + + /** + * Constructs a new EnumValueOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueOptions); + + /** EnumValueOptions deprecated. */ + public deprecated: boolean; + + /** EnumValueOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumValueOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueOptions instance + */ + public static create(properties?: google.protobuf.IEnumValueOptions): google.protobuf.EnumValueOptions; + + /** + * Encodes the specified EnumValueOptions message. 
Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueOptions; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueOptions; + + /** + * Verifies an EnumValueOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumValueOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueOptions; + + /** + * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. + * @param message EnumValueOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ServiceOptions. */ + interface IServiceOptions { + + /** ServiceOptions deprecated */ + deprecated?: (boolean|null); + + /** ServiceOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** ServiceOptions .google.api.defaultHost */ + ".google.api.defaultHost"?: (string|null); + + /** ServiceOptions .google.api.oauthScopes */ + ".google.api.oauthScopes"?: (string|null); + } + + /** Represents a ServiceOptions. */ + class ServiceOptions implements IServiceOptions { + + /** + * Constructs a new ServiceOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceOptions); + + /** ServiceOptions deprecated. */ + public deprecated: boolean; + + /** ServiceOptions uninterpretedOption. 
*/ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ServiceOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ServiceOptions instance + */ + public static create(properties?: google.protobuf.IServiceOptions): google.protobuf.ServiceOptions; + + /** + * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceOptions; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceOptions; + + /** + * Verifies a ServiceOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceOptions; + + /** + * Creates a plain object from a ServiceOptions message. Also converts values to other types if specified. + * @param message ServiceOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MethodOptions. 
*/ + interface IMethodOptions { + + /** MethodOptions deprecated */ + deprecated?: (boolean|null); + + /** MethodOptions idempotencyLevel */ + idempotencyLevel?: (google.protobuf.MethodOptions.IdempotencyLevel|null); + + /** MethodOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** MethodOptions .google.api.http */ + ".google.api.http"?: (google.api.IHttpRule|null); + + /** MethodOptions .google.api.methodSignature */ + ".google.api.methodSignature"?: (string[]|null); + } + + /** Represents a MethodOptions. */ + class MethodOptions implements IMethodOptions { + + /** + * Constructs a new MethodOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodOptions); + + /** MethodOptions deprecated. */ + public deprecated: boolean; + + /** MethodOptions idempotencyLevel. */ + public idempotencyLevel: google.protobuf.MethodOptions.IdempotencyLevel; + + /** MethodOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MethodOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns MethodOptions instance + */ + public static create(properties?: google.protobuf.IMethodOptions): google.protobuf.MethodOptions; + + /** + * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodOptions; + + /** + * Decodes a MethodOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodOptions; + + /** + * Verifies a MethodOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns MethodOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodOptions; + + /** + * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. + * @param message MethodOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace MethodOptions { + + /** IdempotencyLevel enum. */ + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + NO_SIDE_EFFECTS = 1, + IDEMPOTENT = 2 + } + } + + /** Properties of an UninterpretedOption. */ + interface IUninterpretedOption { + + /** UninterpretedOption name */ + name?: (google.protobuf.UninterpretedOption.INamePart[]|null); + + /** UninterpretedOption identifierValue */ + identifierValue?: (string|null); + + /** UninterpretedOption positiveIntValue */ + positiveIntValue?: (number|Long|null); + + /** UninterpretedOption negativeIntValue */ + negativeIntValue?: (number|Long|null); + + /** UninterpretedOption doubleValue */ + doubleValue?: (number|null); + + /** UninterpretedOption stringValue */ + stringValue?: (Uint8Array|null); + + /** UninterpretedOption aggregateValue */ + aggregateValue?: (string|null); + } + + /** Represents an UninterpretedOption. */ + class UninterpretedOption implements IUninterpretedOption { + + /** + * Constructs a new UninterpretedOption. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IUninterpretedOption); + + /** UninterpretedOption name. */ + public name: google.protobuf.UninterpretedOption.INamePart[]; + + /** UninterpretedOption identifierValue. */ + public identifierValue: string; + + /** UninterpretedOption positiveIntValue. */ + public positiveIntValue: (number|Long); + + /** UninterpretedOption negativeIntValue. */ + public negativeIntValue: (number|Long); + + /** UninterpretedOption doubleValue. */ + public doubleValue: number; + + /** UninterpretedOption stringValue. */ + public stringValue: Uint8Array; + + /** UninterpretedOption aggregateValue. */ + public aggregateValue: string; + + /** + * Creates a new UninterpretedOption instance using the specified properties. + * @param [properties] Properties to set + * @returns UninterpretedOption instance + */ + public static create(properties?: google.protobuf.IUninterpretedOption): google.protobuf.UninterpretedOption; + + /** + * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption; + + /** + * Verifies an UninterpretedOption message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns UninterpretedOption + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption; + + /** + * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. + * @param message UninterpretedOption + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UninterpretedOption to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace UninterpretedOption { + + /** Properties of a NamePart. */ + interface INamePart { + + /** NamePart namePart */ + namePart: string; + + /** NamePart isExtension */ + isExtension: boolean; + } + + /** Represents a NamePart. */ + class NamePart implements INamePart { + + /** + * Constructs a new NamePart. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.UninterpretedOption.INamePart); + + /** NamePart namePart. */ + public namePart: string; + + /** NamePart isExtension. */ + public isExtension: boolean; + + /** + * Creates a new NamePart instance using the specified properties. + * @param [properties] Properties to set + * @returns NamePart instance + */ + public static create(properties?: google.protobuf.UninterpretedOption.INamePart): google.protobuf.UninterpretedOption.NamePart; + + /** + * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. 
+ * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a NamePart message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption.NamePart; + + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption.NamePart; + + /** + * Verifies a NamePart message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a NamePart message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns NamePart + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption.NamePart; + + /** + * Creates a plain object from a NamePart message. Also converts values to other types if specified. + * @param message NamePart + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption.NamePart, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this NamePart to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a SourceCodeInfo. */ + interface ISourceCodeInfo { + + /** SourceCodeInfo location */ + location?: (google.protobuf.SourceCodeInfo.ILocation[]|null); + } + + /** Represents a SourceCodeInfo. */ + class SourceCodeInfo implements ISourceCodeInfo { + + /** + * Constructs a new SourceCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ISourceCodeInfo); + + /** SourceCodeInfo location. */ + public location: google.protobuf.SourceCodeInfo.ILocation[]; + + /** + * Creates a new SourceCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns SourceCodeInfo instance + */ + public static create(properties?: google.protobuf.ISourceCodeInfo): google.protobuf.SourceCodeInfo; + + /** + * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. 
+ * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo; + + /** + * Verifies a SourceCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SourceCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo; + + /** + * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. + * @param message SourceCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SourceCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace SourceCodeInfo { + + /** Properties of a Location. */ + interface ILocation { + + /** Location path */ + path?: (number[]|null); + + /** Location span */ + span?: (number[]|null); + + /** Location leadingComments */ + leadingComments?: (string|null); + + /** Location trailingComments */ + trailingComments?: (string|null); + + /** Location leadingDetachedComments */ + leadingDetachedComments?: (string[]|null); + } + + /** Represents a Location. */ + class Location implements ILocation { + + /** + * Constructs a new Location. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.SourceCodeInfo.ILocation); + + /** Location path. */ + public path: number[]; + + /** Location span. */ + public span: number[]; + + /** Location leadingComments. */ + public leadingComments: string; + + /** Location trailingComments. */ + public trailingComments: string; + + /** Location leadingDetachedComments. */ + public leadingDetachedComments: string[]; + + /** + * Creates a new Location instance using the specified properties. + * @param [properties] Properties to set + * @returns Location instance + */ + public static create(properties?: google.protobuf.SourceCodeInfo.ILocation): google.protobuf.SourceCodeInfo.Location; + + /** + * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
+ * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Location message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo.Location; + + /** + * Decodes a Location message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo.Location; + + /** + * Verifies a Location message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Location + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo.Location; + + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. + * @param message Location + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo.Location, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Location to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a GeneratedCodeInfo. */ + interface IGeneratedCodeInfo { + + /** GeneratedCodeInfo annotation */ + annotation?: (google.protobuf.GeneratedCodeInfo.IAnnotation[]|null); + } + + /** Represents a GeneratedCodeInfo. */ + class GeneratedCodeInfo implements IGeneratedCodeInfo { + + /** + * Constructs a new GeneratedCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IGeneratedCodeInfo); + + /** GeneratedCodeInfo annotation. */ + public annotation: google.protobuf.GeneratedCodeInfo.IAnnotation[]; + + /** + * Creates a new GeneratedCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns GeneratedCodeInfo instance + */ + public static create(properties?: google.protobuf.IGeneratedCodeInfo): google.protobuf.GeneratedCodeInfo; + + /** + * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. 
+ * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo; + + /** + * Verifies a GeneratedCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GeneratedCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo; + + /** + * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. + * @param message GeneratedCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GeneratedCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace GeneratedCodeInfo { + + /** Properties of an Annotation. */ + interface IAnnotation { + + /** Annotation path */ + path?: (number[]|null); + + /** Annotation sourceFile */ + sourceFile?: (string|null); + + /** Annotation begin */ + begin?: (number|null); + + /** Annotation end */ + end?: (number|null); + } + + /** Represents an Annotation. */ + class Annotation implements IAnnotation { + + /** + * Constructs a new Annotation. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation); + + /** Annotation path. */ + public path: number[]; + + /** Annotation sourceFile. */ + public sourceFile: string; + + /** Annotation begin. */ + public begin: number; + + /** Annotation end. */ + public end: number; + + /** + * Creates a new Annotation instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns Annotation instance + */ + public static create(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Annotation message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Decodes an Annotation message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Verifies an Annotation message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Annotation message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Annotation + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Creates a plain object from an Annotation message. Also converts values to other types if specified. + * @param message Annotation + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo.Annotation, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Annotation to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + } + + /** Namespace type. */ + namespace type { + + /** Properties of an Expr. */ + interface IExpr { + + /** Expr expression */ + expression?: (string|null); + + /** Expr title */ + title?: (string|null); + + /** Expr description */ + description?: (string|null); + + /** Expr location */ + location?: (string|null); + } + + /** Represents an Expr. */ + class Expr implements IExpr { + + /** + * Constructs a new Expr. + * @param [properties] Properties to set + */ + constructor(properties?: google.type.IExpr); + + /** Expr expression. */ + public expression: string; + + /** Expr title. 
*/ + public title: string; + + /** Expr description. */ + public description: string; + + /** Expr location. */ + public location: string; + + /** + * Creates a new Expr instance using the specified properties. + * @param [properties] Properties to set + * @returns Expr instance + */ + public static create(properties?: google.type.IExpr): google.type.Expr; + + /** + * Encodes the specified Expr message. Does not implicitly {@link google.type.Expr.verify|verify} messages. + * @param message Expr message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.type.IExpr, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Expr message, length delimited. Does not implicitly {@link google.type.Expr.verify|verify} messages. + * @param message Expr message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.type.IExpr, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Expr message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Expr + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.type.Expr; + + /** + * Decodes an Expr message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Expr + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.type.Expr; + + /** + * Verifies an Expr message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Expr message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Expr + */ + public static fromObject(object: { [k: string]: any }): google.type.Expr; + + /** + * Creates a plain object from an Expr message. Also converts values to other types if specified. + * @param message Expr + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.type.Expr, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Expr to JSON. 
+             * @returns JSON object
+             */
+            public toJSON(): { [k: string]: any };
+        }
+    }
+}
diff --git a/dist/protos/iam_service.js b/dist/protos/iam_service.js
new file mode 100644
index 0000000..24d5ecc
--- /dev/null
+++ b/dist/protos/iam_service.js
@@ -0,0 +1 @@
+[generated file: a single-line, minified protobufjs static module registered under o.roots.iam_protos, defining the google.iam.v1 protos (the IAMPolicy service plus SetIamPolicyRequest, GetIamPolicyRequest, TestIamPermissionsRequest/Response, GetPolicyOptions, Policy, Binding, PolicyDelta, BindingDelta, AuditConfigDelta, and logging.AuditData) together with the supporting google.api types (Http, HttpRule, CustomHttpPattern, FieldBehavior, ResourceDescriptor, ResourceReference), the google.protobuf descriptor messages, and google.type.Expr]
\ No newline at end of file
diff --git a/dist/protos/iam_service.json b/dist/protos/iam_service.json
new file mode 100644
index 0000000..f70405c
--- /dev/null
+++ b/dist/protos/iam_service.json
@@ -0,0 +1 @@
+{"nested":{"google":{"nested":{"iam":{"nested":{"v1":{"options":{"cc_enable_arenas":true,"csharp_namespace":"Google.Cloud.Iam.V1","go_package":"google.golang.org/genproto/googleapis/iam/v1;iam","java_multiple_files":true,"java_outer_classname":"PolicyProto","java_package":"com.google.iam.v1","php_namespace":"Google\\Cloud\\Iam\\V1"},"nested":{"IAMPolicy":{"options":{"(google.api.default_host)":"iam-meta-api.googleapis.com"},"methods":{"SetIamPolicy":{"requestType":"SetIamPolicyRequest","responseType":"Policy","options":{"(google.api.http).post":"/v1/{resource=**}:setIamPolicy","(google.api.http).body":"*"},"parsedOptions":[{"(google.api.http)":{"post":"/v1/{resource=**}:setIamPolicy","body":"*"}}]},"GetIamPolicy":{"requestType":"GetIamPolicyRequest","responseType":"Policy","options":{"(google.api.http).post":"/v1/{resource=**}:getIamPolicy","(google.api.http).body":"*"},"parsedOptions":[{"(google.api.http)":{"post":"/v1/{resource=**}:getIamPolicy","body":"*"}}]},"TestIamPermissions":{"requestType":"TestIamPermissionsRequest","responseType":"TestIamPermissionsResponse","options":{"(google.api.http).post":"/v1/{resource=**}:testIamPermissions","(google.api.http).body":"*"},"parsedOptions":[{"(google.api.http)":{"post":"/v1/{resource=**}:testIamPermissions","body":"*"}}]}}},"SetIamPolicyRequest":{"fields":{"resource":{"type":"string","id":1,"options":{"(google.api.field_behavior)":"REQUIRED","(google.api.resource_reference).type":"*"}},"policy":{"type":"Policy","id":2,"options":{"(google.api.field_behavior)":"REQUIRED"}}}},"GetIamPolicyRequest":{"fields":{"resource":{"type":"string","id":1,"options":{"(google.api.field_behavior)":"REQUIRED","(google.api.resource_reference).type":"*"}},"options":{"type":"GetPolicyOptions","id":2}}},"TestIamPermissionsRequest":{"fields":{"resource":{"type":"string","id":1,"options":{"(google.api.field_behavior)":"REQUIRED","(google.api.resource_reference).type":"*"}},"permissions":{"rule":"repeated","type":"string","id":2,"options":{"(google.api.field_behavior)":"REQUIRED"}}}},"TestIamPermissionsResponse":{"fields":{"permissions":{"rule":"repeated","type":"string","id":1}}},"GetPolicyOptions":{"fields":{"requestedPolicyVersion":{"type":"int32","id":1}}},"Policy":{"fields":{"version":{"type":"int32","id":1},"bindings":{"rule":"repeated","type":"Binding","id":4},"etag":{"type":"bytes","id":3}}},"Binding":{"fields":{"role":{"type":"string","id":1},"members":{"rule":"repeated","type":"string","id":2},"condition":{"type":"google.type.Expr","id":3}}},"PolicyDelta":{"fields":{"bindingDeltas":{"rule":"repeated","type":"BindingDelta","id":1},"auditConfigDeltas":{"rule":"repeated","type":"AuditConfigDelta","id":2}}},"BindingDelta":{"fields":{"action":{"type":"Action","id":1},"role":{"type":"string","id":2},"member":{"type":"string","id":3},"condition":{"type":"google.type.Expr","id":4}},"nested":{"Action":{"values":{"ACTION_UNSPECIFIED":0,"ADD":1,"REMOVE":2}}}},"AuditConfigDelta":{"fields":{"action":{"type":"Action","id":1},"service":{"type":"string","id":2},"exemptedMember":{"type":"string","id":3},"logType":{"type":"string","id":4}},"nested":{"Action":{"values":{"ACTION_UNSPECIFIED":0,"ADD":1,"REMOVE":2}}}},"logging":{"options":{"csharp_namespace":"Google.Cloud.Iam.V1.Logging","go_package":"google.golang.org/genproto/googleapis/iam/v1/logging;logging","java_multiple_files":true,"java_outer_classname":"AuditDataProto","java_package":"com.google.iam.v1.logging"},"nested":{"AuditData":{"fields":{"policyDelta":{"type":"google.iam.v1.PolicyDelta","id":2}}}}}}}}},"api":{"opt
ions":{"go_package":"google.golang.org/genproto/googleapis/api/annotations;annotations","java_multiple_files":true,"java_outer_classname":"ResourceProto","java_package":"com.google.api","objc_class_prefix":"GAPI","cc_enable_arenas":true},"nested":{"http":{"type":"HttpRule","id":72295728,"extend":"google.protobuf.MethodOptions"},"Http":{"fields":{"rules":{"rule":"repeated","type":"HttpRule","id":1},"fullyDecodeReservedExpansion":{"type":"bool","id":2}}},"HttpRule":{"oneofs":{"pattern":{"oneof":["get","put","post","delete","patch","custom"]}},"fields":{"selector":{"type":"string","id":1},"get":{"type":"string","id":2},"put":{"type":"string","id":3},"post":{"type":"string","id":4},"delete":{"type":"string","id":5},"patch":{"type":"string","id":6},"custom":{"type":"CustomHttpPattern","id":8},"body":{"type":"string","id":7},"responseBody":{"type":"string","id":12},"additionalBindings":{"rule":"repeated","type":"HttpRule","id":11}}},"CustomHttpPattern":{"fields":{"kind":{"type":"string","id":1},"path":{"type":"string","id":2}}},"methodSignature":{"rule":"repeated","type":"string","id":1051,"extend":"google.protobuf.MethodOptions"},"defaultHost":{"type":"string","id":1049,"extend":"google.protobuf.ServiceOptions"},"oauthScopes":{"type":"string","id":1050,"extend":"google.protobuf.ServiceOptions"},"fieldBehavior":{"rule":"repeated","type":"google.api.FieldBehavior","id":1052,"extend":"google.protobuf.FieldOptions"},"FieldBehavior":{"values":{"FIELD_BEHAVIOR_UNSPECIFIED":0,"OPTIONAL":1,"REQUIRED":2,"OUTPUT_ONLY":3,"INPUT_ONLY":4,"IMMUTABLE":5}},"resourceReference":{"type":"google.api.ResourceReference","id":1055,"extend":"google.protobuf.FieldOptions"},"resourceDefinition":{"rule":"repeated","type":"google.api.ResourceDescriptor","id":1053,"extend":"google.protobuf.FileOptions"},"resource":{"type":"google.api.ResourceDescriptor","id":1053,"extend":"google.protobuf.MessageOptions"},"ResourceDescriptor":{"fields":{"type":{"type":"string","id":1},"pattern":{"rule":"repeated","type":"string","id":2},"nameField":{"type":"string","id":3},"history":{"type":"History","id":4},"plural":{"type":"string","id":5},"singular":{"type":"string","id":6}},"nested":{"History":{"values":{"HISTORY_UNSPECIFIED":0,"ORIGINALLY_SINGLE_PATTERN":1,"FUTURE_MULTI_PATTERN":2}}}},"ResourceReference":{"fields":{"type":{"type":"string","id":1},"childType":{"type":"string","id":2}}}}},"protobuf":{"options":{"go_package":"github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor","java_package":"com.google.protobuf","java_outer_classname":"DescriptorProtos","csharp_namespace":"Google.Protobuf.Reflection","objc_class_prefix":"GPB","cc_enable_arenas":true,"optimize_for":"SPEED"},"nested":{"FileDescriptorSet":{"fields":{"file":{"rule":"repeated","type":"FileDescriptorProto","id":1}}},"FileDescriptorProto":{"fields":{"name":{"type":"string","id":1},"package":{"type":"string","id":2},"dependency":{"rule":"repeated","type":"string","id":3},"publicDependency":{"rule":"repeated","type":"int32","id":10,"options":{"packed":false}},"weakDependency":{"rule":"repeated","type":"int32","id":11,"options":{"packed":false}},"messageType":{"rule":"repeated","type":"DescriptorProto","id":4},"enumType":{"rule":"repeated","type":"EnumDescriptorProto","id":5},"service":{"rule":"repeated","type":"ServiceDescriptorProto","id":6},"extension":{"rule":"repeated","type":"FieldDescriptorProto","id":7},"options":{"type":"FileOptions","id":8},"sourceCodeInfo":{"type":"SourceCodeInfo","id":9},"syntax":{"type":"string","id":12}}},"DescriptorProto":{"fields":{"nam
e":{"type":"string","id":1},"field":{"rule":"repeated","type":"FieldDescriptorProto","id":2},"extension":{"rule":"repeated","type":"FieldDescriptorProto","id":6},"nestedType":{"rule":"repeated","type":"DescriptorProto","id":3},"enumType":{"rule":"repeated","type":"EnumDescriptorProto","id":4},"extensionRange":{"rule":"repeated","type":"ExtensionRange","id":5},"oneofDecl":{"rule":"repeated","type":"OneofDescriptorProto","id":8},"options":{"type":"MessageOptions","id":7},"reservedRange":{"rule":"repeated","type":"ReservedRange","id":9},"reservedName":{"rule":"repeated","type":"string","id":10}},"nested":{"ExtensionRange":{"fields":{"start":{"type":"int32","id":1},"end":{"type":"int32","id":2},"options":{"type":"ExtensionRangeOptions","id":3}}},"ReservedRange":{"fields":{"start":{"type":"int32","id":1},"end":{"type":"int32","id":2}}}}},"ExtensionRangeOptions":{"fields":{"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]]},"FieldDescriptorProto":{"fields":{"name":{"type":"string","id":1},"number":{"type":"int32","id":3},"label":{"type":"Label","id":4},"type":{"type":"Type","id":5},"typeName":{"type":"string","id":6},"extendee":{"type":"string","id":2},"defaultValue":{"type":"string","id":7},"oneofIndex":{"type":"int32","id":9},"jsonName":{"type":"string","id":10},"options":{"type":"FieldOptions","id":8},"proto3Optional":{"type":"bool","id":17}},"nested":{"Type":{"values":{"TYPE_DOUBLE":1,"TYPE_FLOAT":2,"TYPE_INT64":3,"TYPE_UINT64":4,"TYPE_INT32":5,"TYPE_FIXED64":6,"TYPE_FIXED32":7,"TYPE_BOOL":8,"TYPE_STRING":9,"TYPE_GROUP":10,"TYPE_MESSAGE":11,"TYPE_BYTES":12,"TYPE_UINT32":13,"TYPE_ENUM":14,"TYPE_SFIXED32":15,"TYPE_SFIXED64":16,"TYPE_SINT32":17,"TYPE_SINT64":18}},"Label":{"values":{"LABEL_OPTIONAL":1,"LABEL_REQUIRED":2,"LABEL_REPEATED":3}}}},"OneofDescriptorProto":{"fields":{"name":{"type":"string","id":1},"options":{"type":"OneofOptions","id":2}}},"EnumDescriptorProto":{"fields":{"name":{"type":"string","id":1},"value":{"rule":"repeated","type":"EnumValueDescriptorProto","id":2},"options":{"type":"EnumOptions","id":3},"reservedRange":{"rule":"repeated","type":"EnumReservedRange","id":4},"reservedName":{"rule":"repeated","type":"string","id":5}},"nested":{"EnumReservedRange":{"fields":{"start":{"type":"int32","id":1},"end":{"type":"int32","id":2}}}}},"EnumValueDescriptorProto":{"fields":{"name":{"type":"string","id":1},"number":{"type":"int32","id":2},"options":{"type":"EnumValueOptions","id":3}}},"ServiceDescriptorProto":{"fields":{"name":{"type":"string","id":1},"method":{"rule":"repeated","type":"MethodDescriptorProto","id":2},"options":{"type":"ServiceOptions","id":3}}},"MethodDescriptorProto":{"fields":{"name":{"type":"string","id":1},"inputType":{"type":"string","id":2},"outputType":{"type":"string","id":3},"options":{"type":"MethodOptions","id":4},"clientStreaming":{"type":"bool","id":5,"options":{"default":false}},"serverStreaming":{"type":"bool","id":6,"options":{"default":false}}}},"FileOptions":{"fields":{"javaPackage":{"type":"string","id":1},"javaOuterClassname":{"type":"string","id":8},"javaMultipleFiles":{"type":"bool","id":10,"options":{"default":false}},"javaGenerateEqualsAndHash":{"type":"bool","id":20,"options":{"deprecated":true}},"javaStringCheckUtf8":{"type":"bool","id":27,"options":{"default":false}},"optimizeFor":{"type":"OptimizeMode","id":9,"options":{"default":"SPEED"}},"goPackage":{"type":"string","id":11},"ccGenericServices":{"type":"bool","id":16,"options":{"default":false}},"javaGenericServices":{"type":"
bool","id":17,"options":{"default":false}},"pyGenericServices":{"type":"bool","id":18,"options":{"default":false}},"phpGenericServices":{"type":"bool","id":42,"options":{"default":false}},"deprecated":{"type":"bool","id":23,"options":{"default":false}},"ccEnableArenas":{"type":"bool","id":31,"options":{"default":true}},"objcClassPrefix":{"type":"string","id":36},"csharpNamespace":{"type":"string","id":37},"swiftPrefix":{"type":"string","id":39},"phpClassPrefix":{"type":"string","id":40},"phpNamespace":{"type":"string","id":41},"phpMetadataNamespace":{"type":"string","id":44},"rubyPackage":{"type":"string","id":45},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"reserved":[[38,38]],"nested":{"OptimizeMode":{"values":{"SPEED":1,"CODE_SIZE":2,"LITE_RUNTIME":3}}}},"MessageOptions":{"fields":{"messageSetWireFormat":{"type":"bool","id":1,"options":{"default":false}},"noStandardDescriptorAccessor":{"type":"bool","id":2,"options":{"default":false}},"deprecated":{"type":"bool","id":3,"options":{"default":false}},"mapEntry":{"type":"bool","id":7},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"reserved":[[8,8],[9,9]]},"FieldOptions":{"fields":{"ctype":{"type":"CType","id":1,"options":{"default":"STRING"}},"packed":{"type":"bool","id":2},"jstype":{"type":"JSType","id":6,"options":{"default":"JS_NORMAL"}},"lazy":{"type":"bool","id":5,"options":{"default":false}},"deprecated":{"type":"bool","id":3,"options":{"default":false}},"weak":{"type":"bool","id":10,"options":{"default":false}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"reserved":[[4,4]],"nested":{"CType":{"values":{"STRING":0,"CORD":1,"STRING_PIECE":2}},"JSType":{"values":{"JS_NORMAL":0,"JS_STRING":1,"JS_NUMBER":2}}}},"OneofOptions":{"fields":{"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]]},"EnumOptions":{"fields":{"allowAlias":{"type":"bool","id":2},"deprecated":{"type":"bool","id":3,"options":{"default":false}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"reserved":[[5,5]]},"EnumValueOptions":{"fields":{"deprecated":{"type":"bool","id":1,"options":{"default":false}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]]},"ServiceOptions":{"fields":{"deprecated":{"type":"bool","id":33,"options":{"default":false}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]]},"MethodOptions":{"fields":{"deprecated":{"type":"bool","id":33,"options":{"default":false}},"idempotencyLevel":{"type":"IdempotencyLevel","id":34,"options":{"default":"IDEMPOTENCY_UNKNOWN"}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"nested":{"IdempotencyLevel":{"values":{"IDEMPOTENCY_UNKNOWN":0,"NO_SIDE_EFFECTS":1,"IDEMPOTENT":2}}}},"UninterpretedOption":{"fields":{"name":{"rule":"repeated","type":"NamePart","id":2},"identifierValue":{"type":"string","id":3},"positiveIntValue":{"type":"uint64","id":4},"negativeIntValue":{"type":"int64","id":5},"doubleValue":{"type":"double","id":6},"stringValue":{"type":"bytes","id":7},"aggregateValue":{"type":"string","id":8}},"nested":{"NamePart":{"fields":{"namePart":{"rule":"required","type":"string","id":1},"isExtension":{"rule":"requi
red","type":"bool","id":2}}}}},"SourceCodeInfo":{"fields":{"location":{"rule":"repeated","type":"Location","id":1}},"nested":{"Location":{"fields":{"path":{"rule":"repeated","type":"int32","id":1},"span":{"rule":"repeated","type":"int32","id":2},"leadingComments":{"type":"string","id":3},"trailingComments":{"type":"string","id":4},"leadingDetachedComments":{"rule":"repeated","type":"string","id":6}}}}},"GeneratedCodeInfo":{"fields":{"annotation":{"rule":"repeated","type":"Annotation","id":1}},"nested":{"Annotation":{"fields":{"path":{"rule":"repeated","type":"int32","id":1},"sourceFile":{"type":"string","id":2},"begin":{"type":"int32","id":3},"end":{"type":"int32","id":4}}}}}}},"type":{"options":{"go_package":"google.golang.org/genproto/googleapis/type/expr;expr","java_multiple_files":true,"java_outer_classname":"ExprProto","java_package":"com.google.type","objc_class_prefix":"GTP"},"nested":{"Expr":{"fields":{"expression":{"type":"string","id":1},"title":{"type":"string","id":2},"description":{"type":"string","id":3},"location":{"type":"string","id":4}}}}}}}}} \ No newline at end of file diff --git a/dist/protos/locations.d.ts b/dist/protos/locations.d.ts new file mode 100644 index 0000000..501ddb2 --- /dev/null +++ b/dist/protos/locations.d.ts @@ -0,0 +1,4060 @@ +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as $protobuf from "protobufjs"; +import Long = require('long'); +/** Namespace google. */ +export namespace google { + + /** Namespace cloud. */ + namespace cloud { + + /** Namespace location. */ + namespace location { + + /** Represents a Locations */ + class Locations extends $protobuf.rpc.Service { + + /** + * Constructs a new Locations service. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new Locations service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): Locations; + + /** + * Calls ListLocations. + * @param request ListLocationsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ListLocationsResponse + */ + public listLocations(request: google.cloud.location.IListLocationsRequest, callback: google.cloud.location.Locations.ListLocationsCallback): void; + + /** + * Calls ListLocations. 
+ * @param request ListLocationsRequest message or plain object + * @returns Promise + */ + public listLocations(request: google.cloud.location.IListLocationsRequest): Promise; + + /** + * Calls GetLocation. + * @param request GetLocationRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Location + */ + public getLocation(request: google.cloud.location.IGetLocationRequest, callback: google.cloud.location.Locations.GetLocationCallback): void; + + /** + * Calls GetLocation. + * @param request GetLocationRequest message or plain object + * @returns Promise + */ + public getLocation(request: google.cloud.location.IGetLocationRequest): Promise; + } + + namespace Locations { + + /** + * Callback as used by {@link google.cloud.location.Locations#listLocations}. + * @param error Error, if any + * @param [response] ListLocationsResponse + */ + type ListLocationsCallback = (error: (Error|null), response?: google.cloud.location.ListLocationsResponse) => void; + + /** + * Callback as used by {@link google.cloud.location.Locations#getLocation}. + * @param error Error, if any + * @param [response] Location + */ + type GetLocationCallback = (error: (Error|null), response?: google.cloud.location.Location) => void; + } + + /** Properties of a ListLocationsRequest. */ + interface IListLocationsRequest { + + /** ListLocationsRequest name */ + name?: (string|null); + + /** ListLocationsRequest filter */ + filter?: (string|null); + + /** ListLocationsRequest pageSize */ + pageSize?: (number|null); + + /** ListLocationsRequest pageToken */ + pageToken?: (string|null); + } + + /** Represents a ListLocationsRequest. */ + class ListLocationsRequest implements IListLocationsRequest { + + /** + * Constructs a new ListLocationsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.location.IListLocationsRequest); + + /** ListLocationsRequest name. */ + public name: string; + + /** ListLocationsRequest filter. */ + public filter: string; + + /** ListLocationsRequest pageSize. */ + public pageSize: number; + + /** ListLocationsRequest pageToken. */ + public pageToken: string; + + /** + * Creates a new ListLocationsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns ListLocationsRequest instance + */ + public static create(properties?: google.cloud.location.IListLocationsRequest): google.cloud.location.ListLocationsRequest; + + /** + * Encodes the specified ListLocationsRequest message. Does not implicitly {@link google.cloud.location.ListLocationsRequest.verify|verify} messages. + * @param message ListLocationsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.location.IListLocationsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ListLocationsRequest message, length delimited. Does not implicitly {@link google.cloud.location.ListLocationsRequest.verify|verify} messages. + * @param message ListLocationsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.location.IListLocationsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ListLocationsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ListLocationsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.location.ListLocationsRequest; + + /** + * Decodes a ListLocationsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ListLocationsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.location.ListLocationsRequest; + + /** + * Verifies a ListLocationsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ListLocationsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ListLocationsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.location.ListLocationsRequest; + + /** + * Creates a plain object from a ListLocationsRequest message. Also converts values to other types if specified. + * @param message ListLocationsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.location.ListLocationsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ListLocationsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ListLocationsResponse. */ + interface IListLocationsResponse { + + /** ListLocationsResponse locations */ + locations?: (google.cloud.location.ILocation[]|null); + + /** ListLocationsResponse nextPageToken */ + nextPageToken?: (string|null); + } + + /** Represents a ListLocationsResponse. */ + class ListLocationsResponse implements IListLocationsResponse { + + /** + * Constructs a new ListLocationsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.location.IListLocationsResponse); + + /** ListLocationsResponse locations. */ + public locations: google.cloud.location.ILocation[]; + + /** ListLocationsResponse nextPageToken. */ + public nextPageToken: string; + + /** + * Creates a new ListLocationsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns ListLocationsResponse instance + */ + public static create(properties?: google.cloud.location.IListLocationsResponse): google.cloud.location.ListLocationsResponse; + + /** + * Encodes the specified ListLocationsResponse message. Does not implicitly {@link google.cloud.location.ListLocationsResponse.verify|verify} messages. + * @param message ListLocationsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.location.IListLocationsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ListLocationsResponse message, length delimited. Does not implicitly {@link google.cloud.location.ListLocationsResponse.verify|verify} messages. 
+ * @param message ListLocationsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.location.IListLocationsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ListLocationsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ListLocationsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.location.ListLocationsResponse; + + /** + * Decodes a ListLocationsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ListLocationsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.location.ListLocationsResponse; + + /** + * Verifies a ListLocationsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ListLocationsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ListLocationsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.location.ListLocationsResponse; + + /** + * Creates a plain object from a ListLocationsResponse message. Also converts values to other types if specified. + * @param message ListLocationsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.location.ListLocationsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ListLocationsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a GetLocationRequest. */ + interface IGetLocationRequest { + + /** GetLocationRequest name */ + name?: (string|null); + } + + /** Represents a GetLocationRequest. */ + class GetLocationRequest implements IGetLocationRequest { + + /** + * Constructs a new GetLocationRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.location.IGetLocationRequest); + + /** GetLocationRequest name. */ + public name: string; + + /** + * Creates a new GetLocationRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns GetLocationRequest instance + */ + public static create(properties?: google.cloud.location.IGetLocationRequest): google.cloud.location.GetLocationRequest; + + /** + * Encodes the specified GetLocationRequest message. Does not implicitly {@link google.cloud.location.GetLocationRequest.verify|verify} messages. + * @param message GetLocationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.location.IGetLocationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GetLocationRequest message, length delimited. 
Does not implicitly {@link google.cloud.location.GetLocationRequest.verify|verify} messages. + * @param message GetLocationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.location.IGetLocationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetLocationRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetLocationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.location.GetLocationRequest; + + /** + * Decodes a GetLocationRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GetLocationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.location.GetLocationRequest; + + /** + * Verifies a GetLocationRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GetLocationRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GetLocationRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.location.GetLocationRequest; + + /** + * Creates a plain object from a GetLocationRequest message. Also converts values to other types if specified. + * @param message GetLocationRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.location.GetLocationRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GetLocationRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a Location. */ + interface ILocation { + + /** Location name */ + name?: (string|null); + + /** Location locationId */ + locationId?: (string|null); + + /** Location displayName */ + displayName?: (string|null); + + /** Location labels */ + labels?: ({ [k: string]: string }|null); + + /** Location metadata */ + metadata?: (google.protobuf.IAny|null); + } + + /** Represents a Location. */ + class Location implements ILocation { + + /** + * Constructs a new Location. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.location.ILocation); + + /** Location name. */ + public name: string; + + /** Location locationId. */ + public locationId: string; + + /** Location displayName. */ + public displayName: string; + + /** Location labels. */ + public labels: { [k: string]: string }; + + /** Location metadata. */ + public metadata?: (google.protobuf.IAny|null); + + /** + * Creates a new Location instance using the specified properties. + * @param [properties] Properties to set + * @returns Location instance + */ + public static create(properties?: google.cloud.location.ILocation): google.cloud.location.Location; + + /** + * Encodes the specified Location message. 
Does not implicitly {@link google.cloud.location.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.location.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.cloud.location.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.location.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Location message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.location.Location; + + /** + * Decodes a Location message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.location.Location; + + /** + * Verifies a Location message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Location + */ + public static fromObject(object: { [k: string]: any }): google.cloud.location.Location; + + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. + * @param message Location + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.location.Location, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Location to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + } + + /** Namespace api. */ + namespace api { + + /** Properties of a Http. */ + interface IHttp { + + /** Http rules */ + rules?: (google.api.IHttpRule[]|null); + + /** Http fullyDecodeReservedExpansion */ + fullyDecodeReservedExpansion?: (boolean|null); + } + + /** Represents a Http. */ + class Http implements IHttp { + + /** + * Constructs a new Http. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttp); + + /** Http rules. */ + public rules: google.api.IHttpRule[]; + + /** Http fullyDecodeReservedExpansion. */ + public fullyDecodeReservedExpansion: boolean; + + /** + * Creates a new Http instance using the specified properties. + * @param [properties] Properties to set + * @returns Http instance + */ + public static create(properties?: google.api.IHttp): google.api.Http; + + /** + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. 
+ * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Http message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Http; + + /** + * Decodes a Http message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Http; + + /** + * Verifies a Http message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Http message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Http + */ + public static fromObject(object: { [k: string]: any }): google.api.Http; + + /** + * Creates a plain object from a Http message. Also converts values to other types if specified. + * @param message Http + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.Http, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Http to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a HttpRule. */ + interface IHttpRule { + + /** HttpRule selector */ + selector?: (string|null); + + /** HttpRule get */ + get?: (string|null); + + /** HttpRule put */ + put?: (string|null); + + /** HttpRule post */ + post?: (string|null); + + /** HttpRule delete */ + "delete"?: (string|null); + + /** HttpRule patch */ + patch?: (string|null); + + /** HttpRule custom */ + custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body */ + body?: (string|null); + + /** HttpRule responseBody */ + responseBody?: (string|null); + + /** HttpRule additionalBindings */ + additionalBindings?: (google.api.IHttpRule[]|null); + } + + /** Represents a HttpRule. */ + class HttpRule implements IHttpRule { + + /** + * Constructs a new HttpRule. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttpRule); + + /** HttpRule selector. */ + public selector: string; + + /** HttpRule get. */ + public get?: (string|null); + + /** HttpRule put. */ + public put?: (string|null); + + /** HttpRule post. */ + public post?: (string|null); + + /** HttpRule delete. */ + public delete?: (string|null); + + /** HttpRule patch. */ + public patch?: (string|null); + + /** HttpRule custom. 
*/ + public custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body. */ + public body: string; + + /** HttpRule responseBody. */ + public responseBody: string; + + /** HttpRule additionalBindings. */ + public additionalBindings: google.api.IHttpRule[]; + + /** HttpRule pattern. */ + public pattern?: ("get"|"put"|"post"|"delete"|"patch"|"custom"); + + /** + * Creates a new HttpRule instance using the specified properties. + * @param [properties] Properties to set + * @returns HttpRule instance + */ + public static create(properties?: google.api.IHttpRule): google.api.HttpRule; + + /** + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a HttpRule message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.HttpRule; + + /** + * Decodes a HttpRule message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.HttpRule; + + /** + * Verifies a HttpRule message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns HttpRule + */ + public static fromObject(object: { [k: string]: any }): google.api.HttpRule; + + /** + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @param message HttpRule + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.HttpRule, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this HttpRule to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a CustomHttpPattern. */ + interface ICustomHttpPattern { + + /** CustomHttpPattern kind */ + kind?: (string|null); + + /** CustomHttpPattern path */ + path?: (string|null); + } + + /** Represents a CustomHttpPattern. */ + class CustomHttpPattern implements ICustomHttpPattern { + + /** + * Constructs a new CustomHttpPattern. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.api.ICustomHttpPattern); + + /** CustomHttpPattern kind. */ + public kind: string; + + /** CustomHttpPattern path. */ + public path: string; + + /** + * Creates a new CustomHttpPattern instance using the specified properties. + * @param [properties] Properties to set + * @returns CustomHttpPattern instance + */ + public static create(properties?: google.api.ICustomHttpPattern): google.api.CustomHttpPattern; + + /** + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CustomHttpPattern; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CustomHttpPattern; + + /** + * Verifies a CustomHttpPattern message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CustomHttpPattern + */ + public static fromObject(object: { [k: string]: any }): google.api.CustomHttpPattern; + + /** + * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * @param message CustomHttpPattern + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.CustomHttpPattern, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CustomHttpPattern to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Namespace protobuf. */ + namespace protobuf { + + /** Properties of a FileDescriptorSet. */ + interface IFileDescriptorSet { + + /** FileDescriptorSet file */ + file?: (google.protobuf.IFileDescriptorProto[]|null); + } + + /** Represents a FileDescriptorSet. 
*/ + class FileDescriptorSet implements IFileDescriptorSet { + + /** + * Constructs a new FileDescriptorSet. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorSet); + + /** FileDescriptorSet file. */ + public file: google.protobuf.IFileDescriptorProto[]; + + /** + * Creates a new FileDescriptorSet instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorSet instance + */ + public static create(properties?: google.protobuf.IFileDescriptorSet): google.protobuf.FileDescriptorSet; + + /** + * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorSet; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorSet; + + /** + * Verifies a FileDescriptorSet message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorSet + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorSet; + + /** + * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. + * @param message FileDescriptorSet + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorSet, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorSet to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileDescriptorProto. 
*/ + interface IFileDescriptorProto { + + /** FileDescriptorProto name */ + name?: (string|null); + + /** FileDescriptorProto package */ + "package"?: (string|null); + + /** FileDescriptorProto dependency */ + dependency?: (string[]|null); + + /** FileDescriptorProto publicDependency */ + publicDependency?: (number[]|null); + + /** FileDescriptorProto weakDependency */ + weakDependency?: (number[]|null); + + /** FileDescriptorProto messageType */ + messageType?: (google.protobuf.IDescriptorProto[]|null); + + /** FileDescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** FileDescriptorProto service */ + service?: (google.protobuf.IServiceDescriptorProto[]|null); + + /** FileDescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** FileDescriptorProto options */ + options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo */ + sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax */ + syntax?: (string|null); + } + + /** Represents a FileDescriptorProto. */ + class FileDescriptorProto implements IFileDescriptorProto { + + /** + * Constructs a new FileDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorProto); + + /** FileDescriptorProto name. */ + public name: string; + + /** FileDescriptorProto package. */ + public package: string; + + /** FileDescriptorProto dependency. */ + public dependency: string[]; + + /** FileDescriptorProto publicDependency. */ + public publicDependency: number[]; + + /** FileDescriptorProto weakDependency. */ + public weakDependency: number[]; + + /** FileDescriptorProto messageType. */ + public messageType: google.protobuf.IDescriptorProto[]; + + /** FileDescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** FileDescriptorProto service. */ + public service: google.protobuf.IServiceDescriptorProto[]; + + /** FileDescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** FileDescriptorProto options. */ + public options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo. */ + public sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax. */ + public syntax: string; + + /** + * Creates a new FileDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFileDescriptorProto): google.protobuf.FileDescriptorProto; + + /** + * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. 
+ * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorProto; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorProto; + + /** + * Verifies a FileDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorProto; + + /** + * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. + * @param message FileDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a DescriptorProto. */ + interface IDescriptorProto { + + /** DescriptorProto name */ + name?: (string|null); + + /** DescriptorProto field */ + field?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto nestedType */ + nestedType?: (google.protobuf.IDescriptorProto[]|null); + + /** DescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** DescriptorProto extensionRange */ + extensionRange?: (google.protobuf.DescriptorProto.IExtensionRange[]|null); + + /** DescriptorProto oneofDecl */ + oneofDecl?: (google.protobuf.IOneofDescriptorProto[]|null); + + /** DescriptorProto options */ + options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange */ + reservedRange?: (google.protobuf.DescriptorProto.IReservedRange[]|null); + + /** DescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents a DescriptorProto. */ + class DescriptorProto implements IDescriptorProto { + + /** + * Constructs a new DescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IDescriptorProto); + + /** DescriptorProto name. 
*/ + public name: string; + + /** DescriptorProto field. */ + public field: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto nestedType. */ + public nestedType: google.protobuf.IDescriptorProto[]; + + /** DescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** DescriptorProto extensionRange. */ + public extensionRange: google.protobuf.DescriptorProto.IExtensionRange[]; + + /** DescriptorProto oneofDecl. */ + public oneofDecl: google.protobuf.IOneofDescriptorProto[]; + + /** DescriptorProto options. */ + public options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange. */ + public reservedRange: google.protobuf.DescriptorProto.IReservedRange[]; + + /** DescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new DescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns DescriptorProto instance + */ + public static create(properties?: google.protobuf.IDescriptorProto): google.protobuf.DescriptorProto; + + /** + * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto; + + /** + * Verifies a DescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto; + + /** + * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. 
+ * @param message DescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace DescriptorProto { + + /** Properties of an ExtensionRange. */ + interface IExtensionRange { + + /** ExtensionRange start */ + start?: (number|null); + + /** ExtensionRange end */ + end?: (number|null); + + /** ExtensionRange options */ + options?: (google.protobuf.IExtensionRangeOptions|null); + } + + /** Represents an ExtensionRange. */ + class ExtensionRange implements IExtensionRange { + + /** + * Constructs a new ExtensionRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IExtensionRange); + + /** ExtensionRange start. */ + public start: number; + + /** ExtensionRange end. */ + public end: number; + + /** ExtensionRange options. */ + public options?: (google.protobuf.IExtensionRangeOptions|null); + + /** + * Creates a new ExtensionRange instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IExtensionRange): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Verifies an ExtensionRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRange message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. + * @param message ExtensionRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ExtensionRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ReservedRange. */ + interface IReservedRange { + + /** ReservedRange start */ + start?: (number|null); + + /** ReservedRange end */ + end?: (number|null); + } + + /** Represents a ReservedRange. */ + class ReservedRange implements IReservedRange { + + /** + * Constructs a new ReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IReservedRange); + + /** ReservedRange start. */ + public start: number; + + /** ReservedRange end. */ + public end: number; + + /** + * Creates a new ReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns ReservedRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IReservedRange): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Decodes a ReservedRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Verifies a ReservedRange message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. + * @param message ReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an ExtensionRangeOptions. */ + interface IExtensionRangeOptions { + + /** ExtensionRangeOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an ExtensionRangeOptions. */ + class ExtensionRangeOptions implements IExtensionRangeOptions { + + /** + * Constructs a new ExtensionRangeOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IExtensionRangeOptions); + + /** ExtensionRangeOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ExtensionRangeOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRangeOptions instance + */ + public static create(properties?: google.protobuf.IExtensionRangeOptions): google.protobuf.ExtensionRangeOptions; + + /** + * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ExtensionRangeOptions; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ExtensionRangeOptions; + + /** + * Verifies an ExtensionRangeOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRangeOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ExtensionRangeOptions; + + /** + * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. + * @param message ExtensionRangeOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ExtensionRangeOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRangeOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FieldDescriptorProto. */ + interface IFieldDescriptorProto { + + /** FieldDescriptorProto name */ + name?: (string|null); + + /** FieldDescriptorProto number */ + number?: (number|null); + + /** FieldDescriptorProto label */ + label?: (google.protobuf.FieldDescriptorProto.Label|null); + + /** FieldDescriptorProto type */ + type?: (google.protobuf.FieldDescriptorProto.Type|null); + + /** FieldDescriptorProto typeName */ + typeName?: (string|null); + + /** FieldDescriptorProto extendee */ + extendee?: (string|null); + + /** FieldDescriptorProto defaultValue */ + defaultValue?: (string|null); + + /** FieldDescriptorProto oneofIndex */ + oneofIndex?: (number|null); + + /** FieldDescriptorProto jsonName */ + jsonName?: (string|null); + + /** FieldDescriptorProto options */ + options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional */ + proto3Optional?: (boolean|null); + } + + /** Represents a FieldDescriptorProto. */ + class FieldDescriptorProto implements IFieldDescriptorProto { + + /** + * Constructs a new FieldDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldDescriptorProto); + + /** FieldDescriptorProto name. */ + public name: string; + + /** FieldDescriptorProto number. */ + public number: number; + + /** FieldDescriptorProto label. */ + public label: google.protobuf.FieldDescriptorProto.Label; + + /** FieldDescriptorProto type. */ + public type: google.protobuf.FieldDescriptorProto.Type; + + /** FieldDescriptorProto typeName. */ + public typeName: string; + + /** FieldDescriptorProto extendee. */ + public extendee: string; + + /** FieldDescriptorProto defaultValue. */ + public defaultValue: string; + + /** FieldDescriptorProto oneofIndex. */ + public oneofIndex: number; + + /** FieldDescriptorProto jsonName. */ + public jsonName: string; + + /** FieldDescriptorProto options. */ + public options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional. */ + public proto3Optional: boolean; + + /** + * Creates a new FieldDescriptorProto instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns FieldDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFieldDescriptorProto): google.protobuf.FieldDescriptorProto; + + /** + * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldDescriptorProto; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldDescriptorProto; + + /** + * Verifies a FieldDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldDescriptorProto; + + /** + * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. + * @param message FieldDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FieldDescriptorProto { + + /** Type enum. */ + enum Type { + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + TYPE_SINT32 = 17, + TYPE_SINT64 = 18 + } + + /** Label enum. 
*/ + enum Label { + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3 + } + } + + /** Properties of an OneofDescriptorProto. */ + interface IOneofDescriptorProto { + + /** OneofDescriptorProto name */ + name?: (string|null); + + /** OneofDescriptorProto options */ + options?: (google.protobuf.IOneofOptions|null); + } + + /** Represents an OneofDescriptorProto. */ + class OneofDescriptorProto implements IOneofDescriptorProto { + + /** + * Constructs a new OneofDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofDescriptorProto); + + /** OneofDescriptorProto name. */ + public name: string; + + /** OneofDescriptorProto options. */ + public options?: (google.protobuf.IOneofOptions|null); + + /** + * Creates a new OneofDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofDescriptorProto instance + */ + public static create(properties?: google.protobuf.IOneofDescriptorProto): google.protobuf.OneofDescriptorProto; + + /** + * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofDescriptorProto; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofDescriptorProto; + + /** + * Verifies an OneofDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OneofDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofDescriptorProto; + + /** + * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. 
+ * @param message OneofDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumDescriptorProto. */ + interface IEnumDescriptorProto { + + /** EnumDescriptorProto name */ + name?: (string|null); + + /** EnumDescriptorProto value */ + value?: (google.protobuf.IEnumValueDescriptorProto[]|null); + + /** EnumDescriptorProto options */ + options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange */ + reservedRange?: (google.protobuf.EnumDescriptorProto.IEnumReservedRange[]|null); + + /** EnumDescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents an EnumDescriptorProto. */ + class EnumDescriptorProto implements IEnumDescriptorProto { + + /** + * Constructs a new EnumDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumDescriptorProto); + + /** EnumDescriptorProto name. */ + public name: string; + + /** EnumDescriptorProto value. */ + public value: google.protobuf.IEnumValueDescriptorProto[]; + + /** EnumDescriptorProto options. */ + public options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange. */ + public reservedRange: google.protobuf.EnumDescriptorProto.IEnumReservedRange[]; + + /** EnumDescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new EnumDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumDescriptorProto): google.protobuf.EnumDescriptorProto; + + /** + * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto; + + /** + * Verifies an EnumDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto; + + /** + * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. + * @param message EnumDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace EnumDescriptorProto { + + /** Properties of an EnumReservedRange. */ + interface IEnumReservedRange { + + /** EnumReservedRange start */ + start?: (number|null); + + /** EnumReservedRange end */ + end?: (number|null); + } + + /** Represents an EnumReservedRange. */ + class EnumReservedRange implements IEnumReservedRange { + + /** + * Constructs a new EnumReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange); + + /** EnumReservedRange start. */ + public start: number; + + /** EnumReservedRange end. */ + public end: number; + + /** + * Creates a new EnumReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumReservedRange instance + */ + public static create(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Verifies an EnumReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. + * @param message EnumReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto.EnumReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an EnumValueDescriptorProto. */ + interface IEnumValueDescriptorProto { + + /** EnumValueDescriptorProto name */ + name?: (string|null); + + /** EnumValueDescriptorProto number */ + number?: (number|null); + + /** EnumValueDescriptorProto options */ + options?: (google.protobuf.IEnumValueOptions|null); + } + + /** Represents an EnumValueDescriptorProto. */ + class EnumValueDescriptorProto implements IEnumValueDescriptorProto { + + /** + * Constructs a new EnumValueDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueDescriptorProto); + + /** EnumValueDescriptorProto name. */ + public name: string; + + /** EnumValueDescriptorProto number. */ + public number: number; + + /** EnumValueDescriptorProto options. */ + public options?: (google.protobuf.IEnumValueOptions|null); + + /** + * Creates a new EnumValueDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumValueDescriptorProto): google.protobuf.EnumValueDescriptorProto; + + /** + * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. 
+ * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueDescriptorProto; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueDescriptorProto; + + /** + * Verifies an EnumValueDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumValueDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueDescriptorProto; + + /** + * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. + * @param message EnumValueDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ServiceDescriptorProto. */ + interface IServiceDescriptorProto { + + /** ServiceDescriptorProto name */ + name?: (string|null); + + /** ServiceDescriptorProto method */ + method?: (google.protobuf.IMethodDescriptorProto[]|null); + + /** ServiceDescriptorProto options */ + options?: (google.protobuf.IServiceOptions|null); + } + + /** Represents a ServiceDescriptorProto. */ + class ServiceDescriptorProto implements IServiceDescriptorProto { + + /** + * Constructs a new ServiceDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceDescriptorProto); + + /** ServiceDescriptorProto name. */ + public name: string; + + /** ServiceDescriptorProto method. 
*/ + public method: google.protobuf.IMethodDescriptorProto[]; + + /** ServiceDescriptorProto options. */ + public options?: (google.protobuf.IServiceOptions|null); + + /** + * Creates a new ServiceDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns ServiceDescriptorProto instance + */ + public static create(properties?: google.protobuf.IServiceDescriptorProto): google.protobuf.ServiceDescriptorProto; + + /** + * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceDescriptorProto; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceDescriptorProto; + + /** + * Verifies a ServiceDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceDescriptorProto; + + /** + * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. + * @param message ServiceDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MethodDescriptorProto. 
*/ + interface IMethodDescriptorProto { + + /** MethodDescriptorProto name */ + name?: (string|null); + + /** MethodDescriptorProto inputType */ + inputType?: (string|null); + + /** MethodDescriptorProto outputType */ + outputType?: (string|null); + + /** MethodDescriptorProto options */ + options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming */ + clientStreaming?: (boolean|null); + + /** MethodDescriptorProto serverStreaming */ + serverStreaming?: (boolean|null); + } + + /** Represents a MethodDescriptorProto. */ + class MethodDescriptorProto implements IMethodDescriptorProto { + + /** + * Constructs a new MethodDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodDescriptorProto); + + /** MethodDescriptorProto name. */ + public name: string; + + /** MethodDescriptorProto inputType. */ + public inputType: string; + + /** MethodDescriptorProto outputType. */ + public outputType: string; + + /** MethodDescriptorProto options. */ + public options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming. */ + public clientStreaming: boolean; + + /** MethodDescriptorProto serverStreaming. */ + public serverStreaming: boolean; + + /** + * Creates a new MethodDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns MethodDescriptorProto instance + */ + public static create(properties?: google.protobuf.IMethodDescriptorProto): google.protobuf.MethodDescriptorProto; + + /** + * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodDescriptorProto; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodDescriptorProto; + + /** + * Verifies a MethodDescriptorProto message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodDescriptorProto; + + /** + * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. + * @param message MethodDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileOptions. */ + interface IFileOptions { + + /** FileOptions javaPackage */ + javaPackage?: (string|null); + + /** FileOptions javaOuterClassname */ + javaOuterClassname?: (string|null); + + /** FileOptions javaMultipleFiles */ + javaMultipleFiles?: (boolean|null); + + /** FileOptions javaGenerateEqualsAndHash */ + javaGenerateEqualsAndHash?: (boolean|null); + + /** FileOptions javaStringCheckUtf8 */ + javaStringCheckUtf8?: (boolean|null); + + /** FileOptions optimizeFor */ + optimizeFor?: (google.protobuf.FileOptions.OptimizeMode|null); + + /** FileOptions goPackage */ + goPackage?: (string|null); + + /** FileOptions ccGenericServices */ + ccGenericServices?: (boolean|null); + + /** FileOptions javaGenericServices */ + javaGenericServices?: (boolean|null); + + /** FileOptions pyGenericServices */ + pyGenericServices?: (boolean|null); + + /** FileOptions phpGenericServices */ + phpGenericServices?: (boolean|null); + + /** FileOptions deprecated */ + deprecated?: (boolean|null); + + /** FileOptions ccEnableArenas */ + ccEnableArenas?: (boolean|null); + + /** FileOptions objcClassPrefix */ + objcClassPrefix?: (string|null); + + /** FileOptions csharpNamespace */ + csharpNamespace?: (string|null); + + /** FileOptions swiftPrefix */ + swiftPrefix?: (string|null); + + /** FileOptions phpClassPrefix */ + phpClassPrefix?: (string|null); + + /** FileOptions phpNamespace */ + phpNamespace?: (string|null); + + /** FileOptions phpMetadataNamespace */ + phpMetadataNamespace?: (string|null); + + /** FileOptions rubyPackage */ + rubyPackage?: (string|null); + + /** FileOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents a FileOptions. */ + class FileOptions implements IFileOptions { + + /** + * Constructs a new FileOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileOptions); + + /** FileOptions javaPackage. */ + public javaPackage: string; + + /** FileOptions javaOuterClassname. */ + public javaOuterClassname: string; + + /** FileOptions javaMultipleFiles. */ + public javaMultipleFiles: boolean; + + /** FileOptions javaGenerateEqualsAndHash. */ + public javaGenerateEqualsAndHash: boolean; + + /** FileOptions javaStringCheckUtf8. */ + public javaStringCheckUtf8: boolean; + + /** FileOptions optimizeFor. */ + public optimizeFor: google.protobuf.FileOptions.OptimizeMode; + + /** FileOptions goPackage. */ + public goPackage: string; + + /** FileOptions ccGenericServices. 
*/ + public ccGenericServices: boolean; + + /** FileOptions javaGenericServices. */ + public javaGenericServices: boolean; + + /** FileOptions pyGenericServices. */ + public pyGenericServices: boolean; + + /** FileOptions phpGenericServices. */ + public phpGenericServices: boolean; + + /** FileOptions deprecated. */ + public deprecated: boolean; + + /** FileOptions ccEnableArenas. */ + public ccEnableArenas: boolean; + + /** FileOptions objcClassPrefix. */ + public objcClassPrefix: string; + + /** FileOptions csharpNamespace. */ + public csharpNamespace: string; + + /** FileOptions swiftPrefix. */ + public swiftPrefix: string; + + /** FileOptions phpClassPrefix. */ + public phpClassPrefix: string; + + /** FileOptions phpNamespace. */ + public phpNamespace: string; + + /** FileOptions phpMetadataNamespace. */ + public phpMetadataNamespace: string; + + /** FileOptions rubyPackage. */ + public rubyPackage: string; + + /** FileOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FileOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns FileOptions instance + */ + public static create(properties?: google.protobuf.IFileOptions): google.protobuf.FileOptions; + + /** + * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileOptions; + + /** + * Decodes a FileOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileOptions; + + /** + * Verifies a FileOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileOptions; + + /** + * Creates a plain object from a FileOptions message. 
Also converts values to other types if specified. + * @param message FileOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FileOptions { + + /** OptimizeMode enum. */ + enum OptimizeMode { + SPEED = 1, + CODE_SIZE = 2, + LITE_RUNTIME = 3 + } + } + + /** Properties of a MessageOptions. */ + interface IMessageOptions { + + /** MessageOptions messageSetWireFormat */ + messageSetWireFormat?: (boolean|null); + + /** MessageOptions noStandardDescriptorAccessor */ + noStandardDescriptorAccessor?: (boolean|null); + + /** MessageOptions deprecated */ + deprecated?: (boolean|null); + + /** MessageOptions mapEntry */ + mapEntry?: (boolean|null); + + /** MessageOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents a MessageOptions. */ + class MessageOptions implements IMessageOptions { + + /** + * Constructs a new MessageOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMessageOptions); + + /** MessageOptions messageSetWireFormat. */ + public messageSetWireFormat: boolean; + + /** MessageOptions noStandardDescriptorAccessor. */ + public noStandardDescriptorAccessor: boolean; + + /** MessageOptions deprecated. */ + public deprecated: boolean; + + /** MessageOptions mapEntry. */ + public mapEntry: boolean; + + /** MessageOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MessageOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns MessageOptions instance + */ + public static create(properties?: google.protobuf.IMessageOptions): google.protobuf.MessageOptions; + + /** + * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MessageOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MessageOptions; + + /** + * Decodes a MessageOptions message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MessageOptions; + + /** + * Verifies a MessageOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MessageOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MessageOptions; + + /** + * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. + * @param message MessageOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MessageOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MessageOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FieldOptions. */ + interface IFieldOptions { + + /** FieldOptions ctype */ + ctype?: (google.protobuf.FieldOptions.CType|null); + + /** FieldOptions packed */ + packed?: (boolean|null); + + /** FieldOptions jstype */ + jstype?: (google.protobuf.FieldOptions.JSType|null); + + /** FieldOptions lazy */ + lazy?: (boolean|null); + + /** FieldOptions deprecated */ + deprecated?: (boolean|null); + + /** FieldOptions weak */ + weak?: (boolean|null); + + /** FieldOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents a FieldOptions. */ + class FieldOptions implements IFieldOptions { + + /** + * Constructs a new FieldOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldOptions); + + /** FieldOptions ctype. */ + public ctype: google.protobuf.FieldOptions.CType; + + /** FieldOptions packed. */ + public packed: boolean; + + /** FieldOptions jstype. */ + public jstype: google.protobuf.FieldOptions.JSType; + + /** FieldOptions lazy. */ + public lazy: boolean; + + /** FieldOptions deprecated. */ + public deprecated: boolean; + + /** FieldOptions weak. */ + public weak: boolean; + + /** FieldOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FieldOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldOptions instance + */ + public static create(properties?: google.protobuf.IFieldOptions): google.protobuf.FieldOptions; + + /** + * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. 
+ * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions; + + /** + * Decodes a FieldOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions; + + /** + * Verifies a FieldOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions; + + /** + * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. + * @param message FieldOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FieldOptions { + + /** CType enum. */ + enum CType { + STRING = 0, + CORD = 1, + STRING_PIECE = 2 + } + + /** JSType enum. */ + enum JSType { + JS_NORMAL = 0, + JS_STRING = 1, + JS_NUMBER = 2 + } + } + + /** Properties of an OneofOptions. */ + interface IOneofOptions { + + /** OneofOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an OneofOptions. */ + class OneofOptions implements IOneofOptions { + + /** + * Constructs a new OneofOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofOptions); + + /** OneofOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new OneofOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofOptions instance + */ + public static create(properties?: google.protobuf.IOneofOptions): google.protobuf.OneofOptions; + + /** + * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
+ * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofOptions; + + /** + * Decodes an OneofOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofOptions; + + /** + * Verifies an OneofOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OneofOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofOptions; + + /** + * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. + * @param message OneofOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumOptions. */ + interface IEnumOptions { + + /** EnumOptions allowAlias */ + allowAlias?: (boolean|null); + + /** EnumOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumOptions. */ + class EnumOptions implements IEnumOptions { + + /** + * Constructs a new EnumOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumOptions); + + /** EnumOptions allowAlias. */ + public allowAlias: boolean; + + /** EnumOptions deprecated. */ + public deprecated: boolean; + + /** EnumOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumOptions instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns EnumOptions instance + */ + public static create(properties?: google.protobuf.IEnumOptions): google.protobuf.EnumOptions; + + /** + * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumOptions; + + /** + * Decodes an EnumOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumOptions; + + /** + * Verifies an EnumOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumOptions; + + /** + * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. + * @param message EnumOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumValueOptions. */ + interface IEnumValueOptions { + + /** EnumValueOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumValueOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumValueOptions. */ + class EnumValueOptions implements IEnumValueOptions { + + /** + * Constructs a new EnumValueOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueOptions); + + /** EnumValueOptions deprecated. */ + public deprecated: boolean; + + /** EnumValueOptions uninterpretedOption. 
*/ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumValueOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueOptions instance + */ + public static create(properties?: google.protobuf.IEnumValueOptions): google.protobuf.EnumValueOptions; + + /** + * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueOptions; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueOptions; + + /** + * Verifies an EnumValueOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumValueOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueOptions; + + /** + * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. + * @param message EnumValueOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ServiceOptions. 
*/ + interface IServiceOptions { + + /** ServiceOptions deprecated */ + deprecated?: (boolean|null); + + /** ServiceOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** ServiceOptions .google.api.defaultHost */ + ".google.api.defaultHost"?: (string|null); + + /** ServiceOptions .google.api.oauthScopes */ + ".google.api.oauthScopes"?: (string|null); + } + + /** Represents a ServiceOptions. */ + class ServiceOptions implements IServiceOptions { + + /** + * Constructs a new ServiceOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceOptions); + + /** ServiceOptions deprecated. */ + public deprecated: boolean; + + /** ServiceOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ServiceOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ServiceOptions instance + */ + public static create(properties?: google.protobuf.IServiceOptions): google.protobuf.ServiceOptions; + + /** + * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceOptions; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceOptions; + + /** + * Verifies a ServiceOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceOptions; + + /** + * Creates a plain object from a ServiceOptions message. Also converts values to other types if specified. 
+ * @param message ServiceOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MethodOptions. */ + interface IMethodOptions { + + /** MethodOptions deprecated */ + deprecated?: (boolean|null); + + /** MethodOptions idempotencyLevel */ + idempotencyLevel?: (google.protobuf.MethodOptions.IdempotencyLevel|null); + + /** MethodOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** MethodOptions .google.api.http */ + ".google.api.http"?: (google.api.IHttpRule|null); + + /** MethodOptions .google.api.methodSignature */ + ".google.api.methodSignature"?: (string[]|null); + } + + /** Represents a MethodOptions. */ + class MethodOptions implements IMethodOptions { + + /** + * Constructs a new MethodOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodOptions); + + /** MethodOptions deprecated. */ + public deprecated: boolean; + + /** MethodOptions idempotencyLevel. */ + public idempotencyLevel: google.protobuf.MethodOptions.IdempotencyLevel; + + /** MethodOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MethodOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns MethodOptions instance + */ + public static create(properties?: google.protobuf.IMethodOptions): google.protobuf.MethodOptions; + + /** + * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodOptions; + + /** + * Decodes a MethodOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodOptions; + + /** + * Verifies a MethodOptions message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodOptions; + + /** + * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. + * @param message MethodOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace MethodOptions { + + /** IdempotencyLevel enum. */ + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + NO_SIDE_EFFECTS = 1, + IDEMPOTENT = 2 + } + } + + /** Properties of an UninterpretedOption. */ + interface IUninterpretedOption { + + /** UninterpretedOption name */ + name?: (google.protobuf.UninterpretedOption.INamePart[]|null); + + /** UninterpretedOption identifierValue */ + identifierValue?: (string|null); + + /** UninterpretedOption positiveIntValue */ + positiveIntValue?: (number|Long|null); + + /** UninterpretedOption negativeIntValue */ + negativeIntValue?: (number|Long|null); + + /** UninterpretedOption doubleValue */ + doubleValue?: (number|null); + + /** UninterpretedOption stringValue */ + stringValue?: (Uint8Array|null); + + /** UninterpretedOption aggregateValue */ + aggregateValue?: (string|null); + } + + /** Represents an UninterpretedOption. */ + class UninterpretedOption implements IUninterpretedOption { + + /** + * Constructs a new UninterpretedOption. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IUninterpretedOption); + + /** UninterpretedOption name. */ + public name: google.protobuf.UninterpretedOption.INamePart[]; + + /** UninterpretedOption identifierValue. */ + public identifierValue: string; + + /** UninterpretedOption positiveIntValue. */ + public positiveIntValue: (number|Long); + + /** UninterpretedOption negativeIntValue. */ + public negativeIntValue: (number|Long); + + /** UninterpretedOption doubleValue. */ + public doubleValue: number; + + /** UninterpretedOption stringValue. */ + public stringValue: Uint8Array; + + /** UninterpretedOption aggregateValue. */ + public aggregateValue: string; + + /** + * Creates a new UninterpretedOption instance using the specified properties. + * @param [properties] Properties to set + * @returns UninterpretedOption instance + */ + public static create(properties?: google.protobuf.IUninterpretedOption): google.protobuf.UninterpretedOption; + + /** + * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. 
+ * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption; + + /** + * Verifies an UninterpretedOption message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns UninterpretedOption + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption; + + /** + * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. + * @param message UninterpretedOption + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UninterpretedOption to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace UninterpretedOption { + + /** Properties of a NamePart. */ + interface INamePart { + + /** NamePart namePart */ + namePart: string; + + /** NamePart isExtension */ + isExtension: boolean; + } + + /** Represents a NamePart. */ + class NamePart implements INamePart { + + /** + * Constructs a new NamePart. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.UninterpretedOption.INamePart); + + /** NamePart namePart. */ + public namePart: string; + + /** NamePart isExtension. */ + public isExtension: boolean; + + /** + * Creates a new NamePart instance using the specified properties. + * @param [properties] Properties to set + * @returns NamePart instance + */ + public static create(properties?: google.protobuf.UninterpretedOption.INamePart): google.protobuf.UninterpretedOption.NamePart; + + /** + * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified NamePart message, length delimited. 
Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a NamePart message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption.NamePart; + + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption.NamePart; + + /** + * Verifies a NamePart message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a NamePart message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns NamePart + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption.NamePart; + + /** + * Creates a plain object from a NamePart message. Also converts values to other types if specified. + * @param message NamePart + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption.NamePart, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this NamePart to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a SourceCodeInfo. */ + interface ISourceCodeInfo { + + /** SourceCodeInfo location */ + location?: (google.protobuf.SourceCodeInfo.ILocation[]|null); + } + + /** Represents a SourceCodeInfo. */ + class SourceCodeInfo implements ISourceCodeInfo { + + /** + * Constructs a new SourceCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ISourceCodeInfo); + + /** SourceCodeInfo location. */ + public location: google.protobuf.SourceCodeInfo.ILocation[]; + + /** + * Creates a new SourceCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns SourceCodeInfo instance + */ + public static create(properties?: google.protobuf.ISourceCodeInfo): google.protobuf.SourceCodeInfo; + + /** + * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SourceCodeInfo message, length delimited. 
Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo; + + /** + * Verifies a SourceCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SourceCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo; + + /** + * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. + * @param message SourceCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SourceCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace SourceCodeInfo { + + /** Properties of a Location. */ + interface ILocation { + + /** Location path */ + path?: (number[]|null); + + /** Location span */ + span?: (number[]|null); + + /** Location leadingComments */ + leadingComments?: (string|null); + + /** Location trailingComments */ + trailingComments?: (string|null); + + /** Location leadingDetachedComments */ + leadingDetachedComments?: (string[]|null); + } + + /** Represents a Location. */ + class Location implements ILocation { + + /** + * Constructs a new Location. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.SourceCodeInfo.ILocation); + + /** Location path. */ + public path: number[]; + + /** Location span. */ + public span: number[]; + + /** Location leadingComments. */ + public leadingComments: string; + + /** Location trailingComments. */ + public trailingComments: string; + + /** Location leadingDetachedComments. */ + public leadingDetachedComments: string[]; + + /** + * Creates a new Location instance using the specified properties. + * @param [properties] Properties to set + * @returns Location instance + */ + public static create(properties?: google.protobuf.SourceCodeInfo.ILocation): google.protobuf.SourceCodeInfo.Location; + + /** + * Encodes the specified Location message. 
Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Location message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo.Location; + + /** + * Decodes a Location message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo.Location; + + /** + * Verifies a Location message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Location + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo.Location; + + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. + * @param message Location + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo.Location, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Location to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a GeneratedCodeInfo. */ + interface IGeneratedCodeInfo { + + /** GeneratedCodeInfo annotation */ + annotation?: (google.protobuf.GeneratedCodeInfo.IAnnotation[]|null); + } + + /** Represents a GeneratedCodeInfo. */ + class GeneratedCodeInfo implements IGeneratedCodeInfo { + + /** + * Constructs a new GeneratedCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IGeneratedCodeInfo); + + /** GeneratedCodeInfo annotation. */ + public annotation: google.protobuf.GeneratedCodeInfo.IAnnotation[]; + + /** + * Creates a new GeneratedCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns GeneratedCodeInfo instance + */ + public static create(properties?: google.protobuf.IGeneratedCodeInfo): google.protobuf.GeneratedCodeInfo; + + /** + * Encodes the specified GeneratedCodeInfo message. 
Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo; + + /** + * Verifies a GeneratedCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GeneratedCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo; + + /** + * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. + * @param message GeneratedCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GeneratedCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace GeneratedCodeInfo { + + /** Properties of an Annotation. */ + interface IAnnotation { + + /** Annotation path */ + path?: (number[]|null); + + /** Annotation sourceFile */ + sourceFile?: (string|null); + + /** Annotation begin */ + begin?: (number|null); + + /** Annotation end */ + end?: (number|null); + } + + /** Represents an Annotation. */ + class Annotation implements IAnnotation { + + /** + * Constructs a new Annotation. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation); + + /** Annotation path. */ + public path: number[]; + + /** Annotation sourceFile. */ + public sourceFile: string; + + /** Annotation begin. */ + public begin: number; + + /** Annotation end. */ + public end: number; + + /** + * Creates a new Annotation instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns Annotation instance + */ + public static create(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Annotation message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Decodes an Annotation message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Verifies an Annotation message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Annotation message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Annotation + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Creates a plain object from an Annotation message. Also converts values to other types if specified. + * @param message Annotation + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo.Annotation, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Annotation to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an Any. */ + interface IAny { + + /** Any type_url */ + type_url?: (string|null); + + /** Any value */ + value?: (Uint8Array|null); + } + + /** Represents an Any. */ + class Any implements IAny { + + /** + * Constructs a new Any. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IAny); + + /** Any type_url. */ + public type_url: string; + + /** Any value. */ + public value: Uint8Array; + + /** + * Creates a new Any instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns Any instance + */ + public static create(properties?: google.protobuf.IAny): google.protobuf.Any; + + /** + * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @param message Any message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @param message Any message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Any message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Any; + + /** + * Decodes an Any message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Any; + + /** + * Verifies an Any message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Any message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Any + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Any; + + /** + * Creates a plain object from an Any message. Also converts values to other types if specified. + * @param message Any + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Any, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Any to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } +} diff --git a/dist/protos/locations.js b/dist/protos/locations.js new file mode 100644 index 0000000..17e0dec --- /dev/null +++ b/dist/protos/locations.js @@ -0,0 +1 @@ +(e=>{"function"==typeof define&&define.amd?define(["protobufjs/minimal"],e):"function"==typeof require&&"object"==typeof module&&module&&module.exports&&(module.exports=e(require("protobufjs/minimal")))})(function(o){var e,t,n,F,s=o.Reader,r=o.Writer,u=o.util,c=o.roots.locations_protos||(o.roots.locations_protos={});function L(e,t,n){o.rpc.Service.call(this,e,t,n)}function i(e){if(e)for(var t=Object.keys(e),n=0;n>>3){case 1:o.name=e.string();break;case 2:o.filter=e.string();break;case 3:o.pageSize=e.int32();break;case 4:o.pageToken=e.string();break;default:e.skipType(7&r)}}return o},i.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},i.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name)?"name: string expected":null!=e.filter&&e.hasOwnProperty("filter")&&!u.isString(e.filter)?"filter: string expected":null!=e.pageSize&&e.hasOwnProperty("pageSize")&&!u.isInteger(e.pageSize)?"pageSize: integer expected":null!=e.pageToken&&e.hasOwnProperty("pageToken")&&!u.isString(e.pageToken)?"pageToken: string expected":null},i.fromObject=function(e){var t;return e instanceof c.google.cloud.location.ListLocationsRequest?e:(t=new c.google.cloud.location.ListLocationsRequest,null!=e.name&&(t.name=String(e.name)),null!=e.filter&&(t.filter=String(e.filter)),null!=e.pageSize&&(t.pageSize=0|e.pageSize),null!=e.pageToken&&(t.pageToken=String(e.pageToken)),t)},i.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.filter="",n.pageSize=0,n.pageToken=""),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.filter&&e.hasOwnProperty("filter")&&(n.filter=e.filter),null!=e.pageSize&&e.hasOwnProperty("pageSize")&&(n.pageSize=e.pageSize),null!=e.pageToken&&e.hasOwnProperty("pageToken")&&(n.pageToken=e.pageToken),n},i.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},i),e.ListLocationsResponse=(a.prototype.locations=u.emptyArray,a.prototype.nextPageToken="",a.create=function(e){return new a(e)},a.encode=function(e,t){if(t=t||r.create(),null!=e.locations&&e.locations.length)for(var n=0;n>>3){case 1:o.locations&&o.locations.length||(o.locations=[]),o.locations.push(c.google.cloud.location.Location.decode(e,e.uint32()));break;case 2:o.nextPageToken=e.string();break;default:e.skipType(7&r)}}return o},a.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},a.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.locations&&e.hasOwnProperty("locations")){if(!Array.isArray(e.locations))return"locations: array expected";for(var t=0;t>>3==1?o.name=e.string():e.skipType(7&r)}return o},G.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},G.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name)?"name: string expected":null},G.fromObject=function(e){var t;return e instanceof c.google.cloud.location.GetLocationRequest?e:(t=new c.google.cloud.location.GetLocationRequest,null!=e.name&&(t.name=String(e.name)),t)},G.toObject=function(e,t){var 
n={};return(t=t||{}).defaults&&(n.name=""),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),n},G.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},G),e.Location=(p.prototype.name="",p.prototype.locationId="",p.prototype.displayName="",p.prototype.labels=u.emptyObject,p.prototype.metadata=null,p.create=function(e){return new p(e)},p.encode=function(e,t){if(t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.labels&&Object.hasOwnProperty.call(e,"labels"))for(var n=Object.keys(e.labels),o=0;o>>3){case 1:o.name=e.string();break;case 4:o.locationId=e.string();break;case 5:o.displayName=e.string();break;case 2:o.labels===u.emptyObject&&(o.labels={});for(var i=e.uint32()+e.pos,a="",p="";e.pos>>3){case 1:a=e.string();break;case 2:p=e.string();break;default:e.skipType(7&l)}}o.labels[a]=p;break;case 3:o.metadata=c.google.protobuf.Any.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},p.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},p.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name))return"name: string expected";if(null!=e.locationId&&e.hasOwnProperty("locationId")&&!u.isString(e.locationId))return"locationId: string expected";if(null!=e.displayName&&e.hasOwnProperty("displayName")&&!u.isString(e.displayName))return"displayName: string expected";if(null!=e.labels&&e.hasOwnProperty("labels")){if(!u.isObject(e.labels))return"labels: object expected";for(var t=Object.keys(e.labels),n=0;n>>3){case 1:o.rules&&o.rules.length||(o.rules=[]),o.rules.push(c.google.api.HttpRule.decode(e,e.uint32()));break;case 2:o.fullyDecodeReservedExpansion=e.bool();break;default:e.skipType(7&r)}}return o},l.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},l.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.rules&&e.hasOwnProperty("rules")){if(!Array.isArray(e.rules))return"rules: array expected";for(var t=0;t>>3){case 1:o.selector=e.string();break;case 2:o.get=e.string();break;case 3:o.put=e.string();break;case 4:o.post=e.string();break;case 5:o.delete=e.string();break;case 6:o.patch=e.string();break;case 8:o.custom=c.google.api.CustomHttpPattern.decode(e,e.uint32());break;case 7:o.body=e.string();break;case 12:o.responseBody=e.string();break;case 11:o.additionalBindings&&o.additionalBindings.length||(o.additionalBindings=[]),o.additionalBindings.push(c.google.api.HttpRule.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},d.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},d.verify=function(e){if("object"!=typeof e||null===e)return"object expected";var t={};if(null!=e.selector&&e.hasOwnProperty("selector")&&!u.isString(e.selector))return"selector: string expected";if(null!=e.get&&e.hasOwnProperty("get")&&(t.pattern=1,!u.isString(e.get)))return"get: string expected";if(null!=e.put&&e.hasOwnProperty("put")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!u.isString(e.put))return"put: string expected"}if(null!=e.post&&e.hasOwnProperty("post")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!u.isString(e.post))return"post: string expected"}if(null!=e.delete&&e.hasOwnProperty("delete")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!u.isString(e.delete))return"delete: string 
expected"}if(null!=e.patch&&e.hasOwnProperty("patch")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,!u.isString(e.patch))return"patch: string expected"}if(null!=e.custom&&e.hasOwnProperty("custom")){if(1===t.pattern)return"pattern: multiple values";if(t.pattern=1,n=c.google.api.CustomHttpPattern.verify(e.custom))return"custom."+n}if(null!=e.body&&e.hasOwnProperty("body")&&!u.isString(e.body))return"body: string expected";if(null!=e.responseBody&&e.hasOwnProperty("responseBody")&&!u.isString(e.responseBody))return"responseBody: string expected";if(null!=e.additionalBindings&&e.hasOwnProperty("additionalBindings")){if(!Array.isArray(e.additionalBindings))return"additionalBindings: array expected";for(var n,o=0;o>>3){case 1:o.kind=e.string();break;case 2:o.path=e.string();break;default:e.skipType(7&r)}}return o},g.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},g.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.kind&&e.hasOwnProperty("kind")&&!u.isString(e.kind)?"kind: string expected":null!=e.path&&e.hasOwnProperty("path")&&!u.isString(e.path)?"path: string expected":null},g.fromObject=function(e){var t;return e instanceof c.google.api.CustomHttpPattern?e:(t=new c.google.api.CustomHttpPattern,null!=e.kind&&(t.kind=String(e.kind)),null!=e.path&&(t.path=String(e.path)),t)},g.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.kind="",n.path=""),null!=e.kind&&e.hasOwnProperty("kind")&&(n.kind=e.kind),null!=e.path&&e.hasOwnProperty("path")&&(n.path=e.path),n},g.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},g),e),F.protobuf=((n={}).FileDescriptorSet=(B.prototype.file=u.emptyArray,B.create=function(e){return new B(e)},B.encode=function(e,t){if(t=t||r.create(),null!=e.file&&e.file.length)for(var n=0;n>>3==1?(o.file&&o.file.length||(o.file=[]),o.file.push(c.google.protobuf.FileDescriptorProto.decode(e,e.uint32()))):e.skipType(7&r)}return o},B.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},B.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.file&&e.hasOwnProperty("file")){if(!Array.isArray(e.file))return"file: array expected";for(var t=0;t>>3){case 1:o.name=e.string();break;case 2:o.package=e.string();break;case 3:o.dependency&&o.dependency.length||(o.dependency=[]),o.dependency.push(e.string());break;case 10:if(o.publicDependency&&o.publicDependency.length||(o.publicDependency=[]),2==(7&r))for(var i=e.uint32()+e.pos;e.pos>>3){case 1:o.name=e.string();break;case 2:o.field&&o.field.length||(o.field=[]),o.field.push(c.google.protobuf.FieldDescriptorProto.decode(e,e.uint32()));break;case 6:o.extension&&o.extension.length||(o.extension=[]),o.extension.push(c.google.protobuf.FieldDescriptorProto.decode(e,e.uint32()));break;case 3:o.nestedType&&o.nestedType.length||(o.nestedType=[]),o.nestedType.push(c.google.protobuf.DescriptorProto.decode(e,e.uint32()));break;case 4:o.enumType&&o.enumType.length||(o.enumType=[]),o.enumType.push(c.google.protobuf.EnumDescriptorProto.decode(e,e.uint32()));break;case 5:o.extensionRange&&o.extensionRange.length||(o.extensionRange=[]),o.extensionRange.push(c.google.protobuf.DescriptorProto.ExtensionRange.decode(e,e.uint32()));break;case 8:o.oneofDecl&&o.oneofDecl.length||(o.oneofDecl=[]),o.oneofDecl.push(c.google.protobuf.OneofDescriptorProto.decode(e,e.uint32()));break;case 7:o.options=c.google.protobuf.MessageOptions.decode(e,e.uint32());break;case 
9:o.reservedRange&&o.reservedRange.length||(o.reservedRange=[]),o.reservedRange.push(c.google.protobuf.DescriptorProto.ReservedRange.decode(e,e.uint32()));break;case 10:o.reservedName&&o.reservedName.length||(o.reservedName=[]),o.reservedName.push(e.string());break;default:e.skipType(7&r)}}return o},y.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},y.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name))return"name: string expected";if(null!=e.field&&e.hasOwnProperty("field")){if(!Array.isArray(e.field))return"field: array expected";for(var t=0;t>>3){case 1:o.start=e.int32();break;case 2:o.end=e.int32();break;case 3:o.options=c.google.protobuf.ExtensionRangeOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},h.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},h.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.start&&e.hasOwnProperty("start")&&!u.isInteger(e.start))return"start: integer expected";if(null!=e.end&&e.hasOwnProperty("end")&&!u.isInteger(e.end))return"end: integer expected";if(null!=e.options&&e.hasOwnProperty("options")){e=c.google.protobuf.ExtensionRangeOptions.verify(e.options);if(e)return"options."+e}return null},h.fromObject=function(e){if(e instanceof c.google.protobuf.DescriptorProto.ExtensionRange)return e;var t=new c.google.protobuf.DescriptorProto.ExtensionRange;if(null!=e.start&&(t.start=0|e.start),null!=e.end&&(t.end=0|e.end),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.DescriptorProto.ExtensionRange.options: object expected");t.options=c.google.protobuf.ExtensionRangeOptions.fromObject(e.options)}return t},h.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.start=0,n.end=0,n.options=null),null!=e.start&&e.hasOwnProperty("start")&&(n.start=e.start),null!=e.end&&e.hasOwnProperty("end")&&(n.end=e.end),null!=e.options&&e.hasOwnProperty("options")&&(n.options=c.google.protobuf.ExtensionRangeOptions.toObject(e.options,t)),n},h.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},h),y.ReservedRange=(b.prototype.start=0,b.prototype.end=0,b.create=function(e){return new b(e)},b.encode=function(e,t){return t=t||r.create(),null!=e.start&&Object.hasOwnProperty.call(e,"start")&&t.uint32(8).int32(e.start),null!=e.end&&Object.hasOwnProperty.call(e,"end")&&t.uint32(16).int32(e.end),t},b.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},b.decode=function(e,t){e instanceof s||(e=s.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new c.google.protobuf.DescriptorProto.ReservedRange;e.pos>>3){case 1:o.start=e.int32();break;case 2:o.end=e.int32();break;default:e.skipType(7&r)}}return o},b.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},b.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.start&&e.hasOwnProperty("start")&&!u.isInteger(e.start)?"start: integer expected":null!=e.end&&e.hasOwnProperty("end")&&!u.isInteger(e.end)?"end: integer expected":null},b.fromObject=function(e){var t;return e instanceof c.google.protobuf.DescriptorProto.ReservedRange?e:(t=new c.google.protobuf.DescriptorProto.ReservedRange,null!=e.start&&(t.start=0|e.start),null!=e.end&&(t.end=0|e.end),t)},b.toObject=function(e,t){var 
n={};return(t=t||{}).defaults&&(n.start=0,n.end=0),null!=e.start&&e.hasOwnProperty("start")&&(n.start=e.start),null!=e.end&&e.hasOwnProperty("end")&&(n.end=e.end),n},b.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},b),y),n.ExtensionRangeOptions=(U.prototype.uninterpretedOption=u.emptyArray,U.create=function(e){return new U(e)},U.encode=function(e,t){if(t=t||r.create(),null!=e.uninterpretedOption&&e.uninterpretedOption.length)for(var n=0;n>>3==999?(o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(c.google.protobuf.UninterpretedOption.decode(e,e.uint32()))):e.skipType(7&r)}return o},U.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},U.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.name=e.string();break;case 3:o.number=e.int32();break;case 4:o.label=e.int32();break;case 5:o.type=e.int32();break;case 6:o.typeName=e.string();break;case 2:o.extendee=e.string();break;case 7:o.defaultValue=e.string();break;case 9:o.oneofIndex=e.int32();break;case 10:o.jsonName=e.string();break;case 8:o.options=c.google.protobuf.FieldOptions.decode(e,e.uint32());break;case 17:o.proto3Optional=e.bool();break;default:e.skipType(7&r)}}return o},O.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},O.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name))return"name: string expected";if(null!=e.number&&e.hasOwnProperty("number")&&!u.isInteger(e.number))return"number: integer expected";if(null!=e.label&&e.hasOwnProperty("label"))switch(e.label){default:return"label: enum value expected";case 1:case 2:case 3:}if(null!=e.type&&e.hasOwnProperty("type"))switch(e.type){default:return"type: enum value expected";case 1:case 2:case 3:case 4:case 5:case 6:case 7:case 8:case 9:case 10:case 11:case 12:case 13:case 14:case 15:case 16:case 17:case 18:}if(null!=e.typeName&&e.hasOwnProperty("typeName")&&!u.isString(e.typeName))return"typeName: string expected";if(null!=e.extendee&&e.hasOwnProperty("extendee")&&!u.isString(e.extendee))return"extendee: string expected";if(null!=e.defaultValue&&e.hasOwnProperty("defaultValue")&&!u.isString(e.defaultValue))return"defaultValue: string expected";if(null!=e.oneofIndex&&e.hasOwnProperty("oneofIndex")&&!u.isInteger(e.oneofIndex))return"oneofIndex: integer expected";if(null!=e.jsonName&&e.hasOwnProperty("jsonName")&&!u.isString(e.jsonName))return"jsonName: string expected";if(null!=e.options&&e.hasOwnProperty("options")){var t=c.google.protobuf.FieldOptions.verify(e.options);if(t)return"options."+t}return null!=e.proto3Optional&&e.hasOwnProperty("proto3Optional")&&"boolean"!=typeof e.proto3Optional?"proto3Optional: boolean expected":null},O.fromObject=function(e){if(e instanceof c.google.protobuf.FieldDescriptorProto)return e;var t=new c.google.protobuf.FieldDescriptorProto;switch(null!=e.name&&(t.name=String(e.name)),null!=e.number&&(t.number=0|e.number),e.label){case"LABEL_OPTIONAL":case 1:t.label=1;break;case"LABEL_REQUIRED":case 2:t.label=2;break;case"LABEL_REPEATED":case 3:t.label=3}switch(e.type){case"TYPE_DOUBLE":case 1:t.type=1;break;case"TYPE_FLOAT":case 2:t.type=2;break;case"TYPE_INT64":case 
3:t.type=3;break;case"TYPE_UINT64":case 4:t.type=4;break;case"TYPE_INT32":case 5:t.type=5;break;case"TYPE_FIXED64":case 6:t.type=6;break;case"TYPE_FIXED32":case 7:t.type=7;break;case"TYPE_BOOL":case 8:t.type=8;break;case"TYPE_STRING":case 9:t.type=9;break;case"TYPE_GROUP":case 10:t.type=10;break;case"TYPE_MESSAGE":case 11:t.type=11;break;case"TYPE_BYTES":case 12:t.type=12;break;case"TYPE_UINT32":case 13:t.type=13;break;case"TYPE_ENUM":case 14:t.type=14;break;case"TYPE_SFIXED32":case 15:t.type=15;break;case"TYPE_SFIXED64":case 16:t.type=16;break;case"TYPE_SINT32":case 17:t.type=17;break;case"TYPE_SINT64":case 18:t.type=18}if(null!=e.typeName&&(t.typeName=String(e.typeName)),null!=e.extendee&&(t.extendee=String(e.extendee)),null!=e.defaultValue&&(t.defaultValue=String(e.defaultValue)),null!=e.oneofIndex&&(t.oneofIndex=0|e.oneofIndex),null!=e.jsonName&&(t.jsonName=String(e.jsonName)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.FieldDescriptorProto.options: object expected");t.options=c.google.protobuf.FieldOptions.fromObject(e.options)}return null!=e.proto3Optional&&(t.proto3Optional=Boolean(e.proto3Optional)),t},O.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.extendee="",n.number=0,n.label=t.enums===String?"LABEL_OPTIONAL":1,n.type=t.enums===String?"TYPE_DOUBLE":1,n.typeName="",n.defaultValue="",n.options=null,n.oneofIndex=0,n.jsonName="",n.proto3Optional=!1),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.extendee&&e.hasOwnProperty("extendee")&&(n.extendee=e.extendee),null!=e.number&&e.hasOwnProperty("number")&&(n.number=e.number),null!=e.label&&e.hasOwnProperty("label")&&(n.label=t.enums===String?c.google.protobuf.FieldDescriptorProto.Label[e.label]:e.label),null!=e.type&&e.hasOwnProperty("type")&&(n.type=t.enums===String?c.google.protobuf.FieldDescriptorProto.Type[e.type]:e.type),null!=e.typeName&&e.hasOwnProperty("typeName")&&(n.typeName=e.typeName),null!=e.defaultValue&&e.hasOwnProperty("defaultValue")&&(n.defaultValue=e.defaultValue),null!=e.options&&e.hasOwnProperty("options")&&(n.options=c.google.protobuf.FieldOptions.toObject(e.options,t)),null!=e.oneofIndex&&e.hasOwnProperty("oneofIndex")&&(n.oneofIndex=e.oneofIndex),null!=e.jsonName&&e.hasOwnProperty("jsonName")&&(n.jsonName=e.jsonName),null!=e.proto3Optional&&e.hasOwnProperty("proto3Optional")&&(n.proto3Optional=e.proto3Optional),n},O.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},O.Type=(e={},(t=Object.create(e))[e[1]="TYPE_DOUBLE"]=1,t[e[2]="TYPE_FLOAT"]=2,t[e[3]="TYPE_INT64"]=3,t[e[4]="TYPE_UINT64"]=4,t[e[5]="TYPE_INT32"]=5,t[e[6]="TYPE_FIXED64"]=6,t[e[7]="TYPE_FIXED32"]=7,t[e[8]="TYPE_BOOL"]=8,t[e[9]="TYPE_STRING"]=9,t[e[10]="TYPE_GROUP"]=10,t[e[11]="TYPE_MESSAGE"]=11,t[e[12]="TYPE_BYTES"]=12,t[e[13]="TYPE_UINT32"]=13,t[e[14]="TYPE_ENUM"]=14,t[e[15]="TYPE_SFIXED32"]=15,t[e[16]="TYPE_SFIXED64"]=16,t[e[17]="TYPE_SINT32"]=17,t[e[18]="TYPE_SINT64"]=18,t),O.Label=(e={},(t=Object.create(e))[e[1]="LABEL_OPTIONAL"]=1,t[e[2]="LABEL_REQUIRED"]=2,t[e[3]="LABEL_REPEATED"]=3,t),O),n.OneofDescriptorProto=(m.prototype.name="",m.prototype.options=null,m.create=function(e){return new m(e)},m.encode=function(e,t){return t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.options&&Object.hasOwnProperty.call(e,"options")&&c.google.protobuf.OneofOptions.encode(e.options,t.uint32(18).fork()).ldelim(),t},m.encodeDelimited=function(e,t){return 
this.encode(e,t).ldelim()},m.decode=function(e,t){e instanceof s||(e=s.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new c.google.protobuf.OneofDescriptorProto;e.pos>>3){case 1:o.name=e.string();break;case 2:o.options=c.google.protobuf.OneofOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},m.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},m.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name))return"name: string expected";if(null!=e.options&&e.hasOwnProperty("options")){e=c.google.protobuf.OneofOptions.verify(e.options);if(e)return"options."+e}return null},m.fromObject=function(e){if(e instanceof c.google.protobuf.OneofDescriptorProto)return e;var t=new c.google.protobuf.OneofDescriptorProto;if(null!=e.name&&(t.name=String(e.name)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.OneofDescriptorProto.options: object expected");t.options=c.google.protobuf.OneofOptions.fromObject(e.options)}return t},m.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.options=null),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.options&&e.hasOwnProperty("options")&&(n.options=c.google.protobuf.OneofOptions.toObject(e.options,t)),n},m.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},m),n.EnumDescriptorProto=(v.prototype.name="",v.prototype.value=u.emptyArray,v.prototype.options=null,v.prototype.reservedRange=u.emptyArray,v.prototype.reservedName=u.emptyArray,v.create=function(e){return new v(e)},v.encode=function(e,t){if(t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.value&&e.value.length)for(var n=0;n>>3){case 1:o.name=e.string();break;case 2:o.value&&o.value.length||(o.value=[]),o.value.push(c.google.protobuf.EnumValueDescriptorProto.decode(e,e.uint32()));break;case 3:o.options=c.google.protobuf.EnumOptions.decode(e,e.uint32());break;case 4:o.reservedRange&&o.reservedRange.length||(o.reservedRange=[]),o.reservedRange.push(c.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(e,e.uint32()));break;case 5:o.reservedName&&o.reservedName.length||(o.reservedName=[]),o.reservedName.push(e.string());break;default:e.skipType(7&r)}}return o},v.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},v.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name))return"name: string expected";if(null!=e.value&&e.hasOwnProperty("value")){if(!Array.isArray(e.value))return"value: array expected";for(var t=0;t>>3){case 1:o.start=e.int32();break;case 2:o.end=e.int32();break;default:e.skipType(7&r)}}return o},P.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},P.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.start&&e.hasOwnProperty("start")&&!u.isInteger(e.start)?"start: integer expected":null!=e.end&&e.hasOwnProperty("end")&&!u.isInteger(e.end)?"end: integer expected":null},P.fromObject=function(e){var t;return e instanceof c.google.protobuf.EnumDescriptorProto.EnumReservedRange?e:(t=new c.google.protobuf.EnumDescriptorProto.EnumReservedRange,null!=e.start&&(t.start=0|e.start),null!=e.end&&(t.end=0|e.end),t)},P.toObject=function(e,t){var 
n={};return(t=t||{}).defaults&&(n.start=0,n.end=0),null!=e.start&&e.hasOwnProperty("start")&&(n.start=e.start),null!=e.end&&e.hasOwnProperty("end")&&(n.end=e.end),n},P.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},P),v),n.EnumValueDescriptorProto=(w.prototype.name="",w.prototype.number=0,w.prototype.options=null,w.create=function(e){return new w(e)},w.encode=function(e,t){return t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.number&&Object.hasOwnProperty.call(e,"number")&&t.uint32(16).int32(e.number),null!=e.options&&Object.hasOwnProperty.call(e,"options")&&c.google.protobuf.EnumValueOptions.encode(e.options,t.uint32(26).fork()).ldelim(),t},w.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},w.decode=function(e,t){e instanceof s||(e=s.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new c.google.protobuf.EnumValueDescriptorProto;e.pos>>3){case 1:o.name=e.string();break;case 2:o.number=e.int32();break;case 3:o.options=c.google.protobuf.EnumValueOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},w.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},w.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name))return"name: string expected";if(null!=e.number&&e.hasOwnProperty("number")&&!u.isInteger(e.number))return"number: integer expected";if(null!=e.options&&e.hasOwnProperty("options")){e=c.google.protobuf.EnumValueOptions.verify(e.options);if(e)return"options."+e}return null},w.fromObject=function(e){if(e instanceof c.google.protobuf.EnumValueDescriptorProto)return e;var t=new c.google.protobuf.EnumValueDescriptorProto;if(null!=e.name&&(t.name=String(e.name)),null!=e.number&&(t.number=0|e.number),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.EnumValueDescriptorProto.options: object expected");t.options=c.google.protobuf.EnumValueOptions.fromObject(e.options)}return t},w.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.number=0,n.options=null),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.number&&e.hasOwnProperty("number")&&(n.number=e.number),null!=e.options&&e.hasOwnProperty("options")&&(n.options=c.google.protobuf.EnumValueOptions.toObject(e.options,t)),n},w.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},w),n.ServiceDescriptorProto=(j.prototype.name="",j.prototype.method=u.emptyArray,j.prototype.options=null,j.create=function(e){return new j(e)},j.encode=function(e,t){if(t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),null!=e.method&&e.method.length)for(var n=0;n>>3){case 1:o.name=e.string();break;case 2:o.method&&o.method.length||(o.method=[]),o.method.push(c.google.protobuf.MethodDescriptorProto.decode(e,e.uint32()));break;case 3:o.options=c.google.protobuf.ServiceOptions.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},j.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},j.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name))return"name: string expected";if(null!=e.method&&e.hasOwnProperty("method")){if(!Array.isArray(e.method))return"method: array expected";for(var t=0;t>>3){case 1:o.name=e.string();break;case 
2:o.inputType=e.string();break;case 3:o.outputType=e.string();break;case 4:o.options=c.google.protobuf.MethodOptions.decode(e,e.uint32());break;case 5:o.clientStreaming=e.bool();break;case 6:o.serverStreaming=e.bool();break;default:e.skipType(7&r)}}return o},x.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},x.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.name&&e.hasOwnProperty("name")&&!u.isString(e.name))return"name: string expected";if(null!=e.inputType&&e.hasOwnProperty("inputType")&&!u.isString(e.inputType))return"inputType: string expected";if(null!=e.outputType&&e.hasOwnProperty("outputType")&&!u.isString(e.outputType))return"outputType: string expected";if(null!=e.options&&e.hasOwnProperty("options")){var t=c.google.protobuf.MethodOptions.verify(e.options);if(t)return"options."+t}return null!=e.clientStreaming&&e.hasOwnProperty("clientStreaming")&&"boolean"!=typeof e.clientStreaming?"clientStreaming: boolean expected":null!=e.serverStreaming&&e.hasOwnProperty("serverStreaming")&&"boolean"!=typeof e.serverStreaming?"serverStreaming: boolean expected":null},x.fromObject=function(e){if(e instanceof c.google.protobuf.MethodDescriptorProto)return e;var t=new c.google.protobuf.MethodDescriptorProto;if(null!=e.name&&(t.name=String(e.name)),null!=e.inputType&&(t.inputType=String(e.inputType)),null!=e.outputType&&(t.outputType=String(e.outputType)),null!=e.options){if("object"!=typeof e.options)throw TypeError(".google.protobuf.MethodDescriptorProto.options: object expected");t.options=c.google.protobuf.MethodOptions.fromObject(e.options)}return null!=e.clientStreaming&&(t.clientStreaming=Boolean(e.clientStreaming)),null!=e.serverStreaming&&(t.serverStreaming=Boolean(e.serverStreaming)),t},x.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.inputType="",n.outputType="",n.options=null,n.clientStreaming=!1,n.serverStreaming=!1),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.inputType&&e.hasOwnProperty("inputType")&&(n.inputType=e.inputType),null!=e.outputType&&e.hasOwnProperty("outputType")&&(n.outputType=e.outputType),null!=e.options&&e.hasOwnProperty("options")&&(n.options=c.google.protobuf.MethodOptions.toObject(e.options,t)),null!=e.clientStreaming&&e.hasOwnProperty("clientStreaming")&&(n.clientStreaming=e.clientStreaming),null!=e.serverStreaming&&e.hasOwnProperty("serverStreaming")&&(n.serverStreaming=e.serverStreaming),n},x.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},x),n.FileOptions=(S.prototype.javaPackage="",S.prototype.javaOuterClassname="",S.prototype.javaMultipleFiles=!1,S.prototype.javaGenerateEqualsAndHash=!1,S.prototype.javaStringCheckUtf8=!1,S.prototype.optimizeFor=1,S.prototype.goPackage="",S.prototype.ccGenericServices=!1,S.prototype.javaGenericServices=!1,S.prototype.pyGenericServices=!1,S.prototype.phpGenericServices=!1,S.prototype.deprecated=!1,S.prototype.ccEnableArenas=!0,S.prototype.objcClassPrefix="",S.prototype.csharpNamespace="",S.prototype.swiftPrefix="",S.prototype.phpClassPrefix="",S.prototype.phpNamespace="",S.prototype.phpMetadataNamespace="",S.prototype.rubyPackage="",S.prototype.uninterpretedOption=u.emptyArray,S.create=function(e){return new 
S(e)},S.encode=function(e,t){if(t=t||r.create(),null!=e.javaPackage&&Object.hasOwnProperty.call(e,"javaPackage")&&t.uint32(10).string(e.javaPackage),null!=e.javaOuterClassname&&Object.hasOwnProperty.call(e,"javaOuterClassname")&&t.uint32(66).string(e.javaOuterClassname),null!=e.optimizeFor&&Object.hasOwnProperty.call(e,"optimizeFor")&&t.uint32(72).int32(e.optimizeFor),null!=e.javaMultipleFiles&&Object.hasOwnProperty.call(e,"javaMultipleFiles")&&t.uint32(80).bool(e.javaMultipleFiles),null!=e.goPackage&&Object.hasOwnProperty.call(e,"goPackage")&&t.uint32(90).string(e.goPackage),null!=e.ccGenericServices&&Object.hasOwnProperty.call(e,"ccGenericServices")&&t.uint32(128).bool(e.ccGenericServices),null!=e.javaGenericServices&&Object.hasOwnProperty.call(e,"javaGenericServices")&&t.uint32(136).bool(e.javaGenericServices),null!=e.pyGenericServices&&Object.hasOwnProperty.call(e,"pyGenericServices")&&t.uint32(144).bool(e.pyGenericServices),null!=e.javaGenerateEqualsAndHash&&Object.hasOwnProperty.call(e,"javaGenerateEqualsAndHash")&&t.uint32(160).bool(e.javaGenerateEqualsAndHash),null!=e.deprecated&&Object.hasOwnProperty.call(e,"deprecated")&&t.uint32(184).bool(e.deprecated),null!=e.javaStringCheckUtf8&&Object.hasOwnProperty.call(e,"javaStringCheckUtf8")&&t.uint32(216).bool(e.javaStringCheckUtf8),null!=e.ccEnableArenas&&Object.hasOwnProperty.call(e,"ccEnableArenas")&&t.uint32(248).bool(e.ccEnableArenas),null!=e.objcClassPrefix&&Object.hasOwnProperty.call(e,"objcClassPrefix")&&t.uint32(290).string(e.objcClassPrefix),null!=e.csharpNamespace&&Object.hasOwnProperty.call(e,"csharpNamespace")&&t.uint32(298).string(e.csharpNamespace),null!=e.swiftPrefix&&Object.hasOwnProperty.call(e,"swiftPrefix")&&t.uint32(314).string(e.swiftPrefix),null!=e.phpClassPrefix&&Object.hasOwnProperty.call(e,"phpClassPrefix")&&t.uint32(322).string(e.phpClassPrefix),null!=e.phpNamespace&&Object.hasOwnProperty.call(e,"phpNamespace")&&t.uint32(330).string(e.phpNamespace),null!=e.phpGenericServices&&Object.hasOwnProperty.call(e,"phpGenericServices")&&t.uint32(336).bool(e.phpGenericServices),null!=e.phpMetadataNamespace&&Object.hasOwnProperty.call(e,"phpMetadataNamespace")&&t.uint32(354).string(e.phpMetadataNamespace),null!=e.rubyPackage&&Object.hasOwnProperty.call(e,"rubyPackage")&&t.uint32(362).string(e.rubyPackage),null!=e.uninterpretedOption&&e.uninterpretedOption.length)for(var n=0;n>>3){case 1:o.javaPackage=e.string();break;case 8:o.javaOuterClassname=e.string();break;case 10:o.javaMultipleFiles=e.bool();break;case 20:o.javaGenerateEqualsAndHash=e.bool();break;case 27:o.javaStringCheckUtf8=e.bool();break;case 9:o.optimizeFor=e.int32();break;case 11:o.goPackage=e.string();break;case 16:o.ccGenericServices=e.bool();break;case 17:o.javaGenericServices=e.bool();break;case 18:o.pyGenericServices=e.bool();break;case 42:o.phpGenericServices=e.bool();break;case 23:o.deprecated=e.bool();break;case 31:o.ccEnableArenas=e.bool();break;case 36:o.objcClassPrefix=e.string();break;case 37:o.csharpNamespace=e.string();break;case 39:o.swiftPrefix=e.string();break;case 40:o.phpClassPrefix=e.string();break;case 41:o.phpNamespace=e.string();break;case 44:o.phpMetadataNamespace=e.string();break;case 45:o.rubyPackage=e.string();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(c.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},S.decodeDelimited=function(e){return e instanceof s||(e=new 
s(e)),this.decode(e,e.uint32())},S.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.javaPackage&&e.hasOwnProperty("javaPackage")&&!u.isString(e.javaPackage))return"javaPackage: string expected";if(null!=e.javaOuterClassname&&e.hasOwnProperty("javaOuterClassname")&&!u.isString(e.javaOuterClassname))return"javaOuterClassname: string expected";if(null!=e.javaMultipleFiles&&e.hasOwnProperty("javaMultipleFiles")&&"boolean"!=typeof e.javaMultipleFiles)return"javaMultipleFiles: boolean expected";if(null!=e.javaGenerateEqualsAndHash&&e.hasOwnProperty("javaGenerateEqualsAndHash")&&"boolean"!=typeof e.javaGenerateEqualsAndHash)return"javaGenerateEqualsAndHash: boolean expected";if(null!=e.javaStringCheckUtf8&&e.hasOwnProperty("javaStringCheckUtf8")&&"boolean"!=typeof e.javaStringCheckUtf8)return"javaStringCheckUtf8: boolean expected";if(null!=e.optimizeFor&&e.hasOwnProperty("optimizeFor"))switch(e.optimizeFor){default:return"optimizeFor: enum value expected";case 1:case 2:case 3:}if(null!=e.goPackage&&e.hasOwnProperty("goPackage")&&!u.isString(e.goPackage))return"goPackage: string expected";if(null!=e.ccGenericServices&&e.hasOwnProperty("ccGenericServices")&&"boolean"!=typeof e.ccGenericServices)return"ccGenericServices: boolean expected";if(null!=e.javaGenericServices&&e.hasOwnProperty("javaGenericServices")&&"boolean"!=typeof e.javaGenericServices)return"javaGenericServices: boolean expected";if(null!=e.pyGenericServices&&e.hasOwnProperty("pyGenericServices")&&"boolean"!=typeof e.pyGenericServices)return"pyGenericServices: boolean expected";if(null!=e.phpGenericServices&&e.hasOwnProperty("phpGenericServices")&&"boolean"!=typeof e.phpGenericServices)return"phpGenericServices: boolean expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.ccEnableArenas&&e.hasOwnProperty("ccEnableArenas")&&"boolean"!=typeof e.ccEnableArenas)return"ccEnableArenas: boolean expected";if(null!=e.objcClassPrefix&&e.hasOwnProperty("objcClassPrefix")&&!u.isString(e.objcClassPrefix))return"objcClassPrefix: string expected";if(null!=e.csharpNamespace&&e.hasOwnProperty("csharpNamespace")&&!u.isString(e.csharpNamespace))return"csharpNamespace: string expected";if(null!=e.swiftPrefix&&e.hasOwnProperty("swiftPrefix")&&!u.isString(e.swiftPrefix))return"swiftPrefix: string expected";if(null!=e.phpClassPrefix&&e.hasOwnProperty("phpClassPrefix")&&!u.isString(e.phpClassPrefix))return"phpClassPrefix: string expected";if(null!=e.phpNamespace&&e.hasOwnProperty("phpNamespace")&&!u.isString(e.phpNamespace))return"phpNamespace: string expected";if(null!=e.phpMetadataNamespace&&e.hasOwnProperty("phpMetadataNamespace")&&!u.isString(e.phpMetadataNamespace))return"phpMetadataNamespace: string expected";if(null!=e.rubyPackage&&e.hasOwnProperty("rubyPackage")&&!u.isString(e.rubyPackage))return"rubyPackage: string expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.messageSetWireFormat=e.bool();break;case 2:o.noStandardDescriptorAccessor=e.bool();break;case 3:o.deprecated=e.bool();break;case 7:o.mapEntry=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(c.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},k.decodeDelimited=function(e){return e instanceof 
s||(e=new s(e)),this.decode(e,e.uint32())},k.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.messageSetWireFormat&&e.hasOwnProperty("messageSetWireFormat")&&"boolean"!=typeof e.messageSetWireFormat)return"messageSetWireFormat: boolean expected";if(null!=e.noStandardDescriptorAccessor&&e.hasOwnProperty("noStandardDescriptorAccessor")&&"boolean"!=typeof e.noStandardDescriptorAccessor)return"noStandardDescriptorAccessor: boolean expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.mapEntry&&e.hasOwnProperty("mapEntry")&&"boolean"!=typeof e.mapEntry)return"mapEntry: boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.ctype=e.int32();break;case 2:o.packed=e.bool();break;case 6:o.jstype=e.int32();break;case 5:o.lazy=e.bool();break;case 3:o.deprecated=e.bool();break;case 10:o.weak=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(c.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},D.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},D.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.ctype&&e.hasOwnProperty("ctype"))switch(e.ctype){default:return"ctype: enum value expected";case 0:case 1:case 2:}if(null!=e.packed&&e.hasOwnProperty("packed")&&"boolean"!=typeof e.packed)return"packed: boolean expected";if(null!=e.jstype&&e.hasOwnProperty("jstype"))switch(e.jstype){default:return"jstype: enum value expected";case 0:case 1:case 2:}if(null!=e.lazy&&e.hasOwnProperty("lazy")&&"boolean"!=typeof e.lazy)return"lazy: boolean expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.weak&&e.hasOwnProperty("weak")&&"boolean"!=typeof e.weak)return"weak: boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3==999?(o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(c.google.protobuf.UninterpretedOption.decode(e,e.uint32()))):e.skipType(7&r)}return o},M.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},M.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 2:o.allowAlias=e.bool();break;case 3:o.deprecated=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(c.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},T.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},T.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.allowAlias&&e.hasOwnProperty("allowAlias")&&"boolean"!=typeof e.allowAlias)return"allowAlias: boolean expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: 
boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 1:o.deprecated=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(c.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;default:e.skipType(7&r)}}return o},E.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},E.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 33:o.deprecated=e.bool();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(c.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;case 1049:o[".google.api.defaultHost"]=e.string();break;case 1050:o[".google.api.oauthScopes"]=e.string();break;default:e.skipType(7&r)}}return o},A.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},A.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 33:o.deprecated=e.bool();break;case 34:o.idempotencyLevel=e.int32();break;case 999:o.uninterpretedOption&&o.uninterpretedOption.length||(o.uninterpretedOption=[]),o.uninterpretedOption.push(c.google.protobuf.UninterpretedOption.decode(e,e.uint32()));break;case 72295728:o[".google.api.http"]=c.google.api.HttpRule.decode(e,e.uint32());break;case 1051:o[".google.api.methodSignature"]&&o[".google.api.methodSignature"].length||(o[".google.api.methodSignature"]=[]),o[".google.api.methodSignature"].push(e.string());break;default:e.skipType(7&r)}}return o},N.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},N.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.deprecated&&e.hasOwnProperty("deprecated")&&"boolean"!=typeof e.deprecated)return"deprecated: boolean expected";if(null!=e.idempotencyLevel&&e.hasOwnProperty("idempotencyLevel"))switch(e.idempotencyLevel){default:return"idempotencyLevel: enum value expected";case 0:case 1:case 2:}if(null!=e.uninterpretedOption&&e.hasOwnProperty("uninterpretedOption")){if(!Array.isArray(e.uninterpretedOption))return"uninterpretedOption: array expected";for(var t=0;t>>3){case 2:o.name&&o.name.length||(o.name=[]),o.name.push(c.google.protobuf.UninterpretedOption.NamePart.decode(e,e.uint32()));break;case 3:o.identifierValue=e.string();break;case 4:o.positiveIntValue=e.uint64();break;case 5:o.negativeIntValue=e.int64();break;case 6:o.doubleValue=e.double();break;case 7:o.stringValue=e.bytes();break;case 8:o.aggregateValue=e.string();break;default:e.skipType(7&r)}}return o},I.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},I.verify=function(e){if("object"!=typeof e||null===e)return"object 
expected";if(null!=e.name&&e.hasOwnProperty("name")){if(!Array.isArray(e.name))return"name: array expected";for(var t=0;t>>0,e.positiveIntValue.high>>>0).toNumber(!0))),null!=e.negativeIntValue&&(u.Long?(t.negativeIntValue=u.Long.fromValue(e.negativeIntValue)).unsigned=!1:"string"==typeof e.negativeIntValue?t.negativeIntValue=parseInt(e.negativeIntValue,10):"number"==typeof e.negativeIntValue?t.negativeIntValue=e.negativeIntValue:"object"==typeof e.negativeIntValue&&(t.negativeIntValue=new u.LongBits(e.negativeIntValue.low>>>0,e.negativeIntValue.high>>>0).toNumber())),null!=e.doubleValue&&(t.doubleValue=Number(e.doubleValue)),null!=e.stringValue&&("string"==typeof e.stringValue?u.base64.decode(e.stringValue,t.stringValue=u.newBuffer(u.base64.length(e.stringValue)),0):e.stringValue.length&&(t.stringValue=e.stringValue)),null!=e.aggregateValue&&(t.aggregateValue=String(e.aggregateValue)),t},I.toObject=function(e,t){var n,o={};if(((t=t||{}).arrays||t.defaults)&&(o.name=[]),t.defaults&&(o.identifierValue="",u.Long?(n=new u.Long(0,0,!0),o.positiveIntValue=t.longs===String?n.toString():t.longs===Number?n.toNumber():n):o.positiveIntValue=t.longs===String?"0":0,u.Long?(n=new u.Long(0,0,!1),o.negativeIntValue=t.longs===String?n.toString():t.longs===Number?n.toNumber():n):o.negativeIntValue=t.longs===String?"0":0,o.doubleValue=0,t.bytes===String?o.stringValue="":(o.stringValue=[],t.bytes!==Array&&(o.stringValue=u.newBuffer(o.stringValue))),o.aggregateValue=""),e.name&&e.name.length){o.name=[];for(var r=0;r>>0,e.positiveIntValue.high>>>0).toNumber(!0):e.positiveIntValue),null!=e.negativeIntValue&&e.hasOwnProperty("negativeIntValue")&&("number"==typeof e.negativeIntValue?o.negativeIntValue=t.longs===String?String(e.negativeIntValue):e.negativeIntValue:o.negativeIntValue=t.longs===String?u.Long.prototype.toString.call(e.negativeIntValue):t.longs===Number?new u.LongBits(e.negativeIntValue.low>>>0,e.negativeIntValue.high>>>0).toNumber():e.negativeIntValue),null!=e.doubleValue&&e.hasOwnProperty("doubleValue")&&(o.doubleValue=t.json&&!isFinite(e.doubleValue)?String(e.doubleValue):e.doubleValue),null!=e.stringValue&&e.hasOwnProperty("stringValue")&&(o.stringValue=t.bytes===String?u.base64.encode(e.stringValue,0,e.stringValue.length):t.bytes===Array?Array.prototype.slice.call(e.stringValue):e.stringValue),null!=e.aggregateValue&&e.hasOwnProperty("aggregateValue")&&(o.aggregateValue=e.aggregateValue),o},I.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},I.NamePart=(R.prototype.namePart="",R.prototype.isExtension=!1,R.create=function(e){return new R(e)},R.encode=function(e,t){return(t=t||r.create()).uint32(10).string(e.namePart),t.uint32(16).bool(e.isExtension),t},R.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},R.decode=function(e,t){e instanceof s||(e=s.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new c.google.protobuf.UninterpretedOption.NamePart;e.pos>>3){case 1:o.namePart=e.string();break;case 2:o.isExtension=e.bool();break;default:e.skipType(7&r)}}if(!o.hasOwnProperty("namePart"))throw u.ProtocolError("missing required 'namePart'",{instance:o});if(o.hasOwnProperty("isExtension"))return o;throw u.ProtocolError("missing required 'isExtension'",{instance:o})},R.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},R.verify=function(e){return"object"!=typeof e||null===e?"object expected":u.isString(e.namePart)?"boolean"!=typeof e.isExtension?"isExtension: boolean expected":null:"namePart: string 
expected"},R.fromObject=function(e){var t;return e instanceof c.google.protobuf.UninterpretedOption.NamePart?e:(t=new c.google.protobuf.UninterpretedOption.NamePart,null!=e.namePart&&(t.namePart=String(e.namePart)),null!=e.isExtension&&(t.isExtension=Boolean(e.isExtension)),t)},R.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.namePart="",n.isExtension=!1),null!=e.namePart&&e.hasOwnProperty("namePart")&&(n.namePart=e.namePart),null!=e.isExtension&&e.hasOwnProperty("isExtension")&&(n.isExtension=e.isExtension),n},R.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},R),I),n.SourceCodeInfo=(_.prototype.location=u.emptyArray,_.create=function(e){return new _(e)},_.encode=function(e,t){if(t=t||r.create(),null!=e.location&&e.location.length)for(var n=0;n>>3==1?(o.location&&o.location.length||(o.location=[]),o.location.push(c.google.protobuf.SourceCodeInfo.Location.decode(e,e.uint32()))):e.skipType(7&r)}return o},_.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},_.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.location&&e.hasOwnProperty("location")){if(!Array.isArray(e.location))return"location: array expected";for(var t=0;t>>3){case 1:if(o.path&&o.path.length||(o.path=[]),2==(7&r))for(var i=e.uint32()+e.pos;e.pos>>3==1?(o.annotation&&o.annotation.length||(o.annotation=[]),o.annotation.push(c.google.protobuf.GeneratedCodeInfo.Annotation.decode(e,e.uint32()))):e.skipType(7&r)}return o},J.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},J.verify=function(e){if("object"!=typeof e||null===e)return"object expected";if(null!=e.annotation&&e.hasOwnProperty("annotation")){if(!Array.isArray(e.annotation))return"annotation: array expected";for(var t=0;t>>3){case 1:if(o.path&&o.path.length||(o.path=[]),2==(7&r))for(var i=e.uint32()+e.pos;e.pos>>3){case 1:o.type_url=e.string();break;case 2:o.value=e.bytes();break;default:e.skipType(7&r)}}return o},H.decodeDelimited=function(e){return e instanceof s||(e=new s(e)),this.decode(e,e.uint32())},H.verify=function(e){return"object"!=typeof e||null===e?"object expected":null!=e.type_url&&e.hasOwnProperty("type_url")&&!u.isString(e.type_url)?"type_url: string expected":null!=e.value&&e.hasOwnProperty("value")&&!(e.value&&"number"==typeof e.value.length||u.isString(e.value))?"value: buffer expected":null},H.fromObject=function(e){var t;return e instanceof c.google.protobuf.Any?e:(t=new c.google.protobuf.Any,null!=e.type_url&&(t.type_url=String(e.type_url)),null!=e.value&&("string"==typeof e.value?u.base64.decode(e.value,t.value=u.newBuffer(u.base64.length(e.value)),0):e.value.length&&(t.value=e.value)),t)},H.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.type_url="",t.bytes===String?n.value="":(n.value=[],t.bytes!==Array&&(n.value=u.newBuffer(n.value)))),null!=e.type_url&&e.hasOwnProperty("type_url")&&(n.type_url=e.type_url),null!=e.value&&e.hasOwnProperty("value")&&(n.value=t.bytes===String?u.base64.encode(e.value,0,e.value.length):t.bytes===Array?Array.prototype.slice.call(e.value):e.value),n},H.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},H),n),F),c}); \ No newline at end of file diff --git a/dist/protos/locations.json b/dist/protos/locations.json new file mode 100644 index 0000000..31fd40f --- /dev/null +++ b/dist/protos/locations.json @@ -0,0 +1 @@ 
+{"nested":{"google":{"nested":{"cloud":{"nested":{"location":{"options":{"cc_enable_arenas":true,"go_package":"google.golang.org/genproto/googleapis/cloud/location;location","java_multiple_files":true,"java_outer_classname":"LocationsProto","java_package":"com.google.cloud.location"},"nested":{"Locations":{"options":{"(google.api.default_host)":"cloud.googleapis.com","(google.api.oauth_scopes)":"https://www.googleapis.com/auth/cloud-platform"},"methods":{"ListLocations":{"requestType":"ListLocationsRequest","responseType":"ListLocationsResponse","options":{"(google.api.http).get":"/v1/{name=locations}","(google.api.http).additional_bindings.get":"/v1/{name=projects/*}/locations"},"parsedOptions":[{"(google.api.http)":{"get":"/v1/{name=locations}","additional_bindings":{"get":"/v1/{name=projects/*}/locations"}}}]},"GetLocation":{"requestType":"GetLocationRequest","responseType":"Location","options":{"(google.api.http).get":"/v1/{name=locations/*}","(google.api.http).additional_bindings.get":"/v1/{name=projects/*/locations/*}"},"parsedOptions":[{"(google.api.http)":{"get":"/v1/{name=locations/*}","additional_bindings":{"get":"/v1/{name=projects/*/locations/*}"}}}]}}},"ListLocationsRequest":{"fields":{"name":{"type":"string","id":1},"filter":{"type":"string","id":2},"pageSize":{"type":"int32","id":3},"pageToken":{"type":"string","id":4}}},"ListLocationsResponse":{"fields":{"locations":{"rule":"repeated","type":"Location","id":1},"nextPageToken":{"type":"string","id":2}}},"GetLocationRequest":{"fields":{"name":{"type":"string","id":1}}},"Location":{"fields":{"name":{"type":"string","id":1},"locationId":{"type":"string","id":4},"displayName":{"type":"string","id":5},"labels":{"keyType":"string","type":"string","id":2},"metadata":{"type":"google.protobuf.Any","id":3}}}}}}},"api":{"options":{"go_package":"google.golang.org/genproto/googleapis/api/annotations;annotations","java_multiple_files":true,"java_outer_classname":"ClientProto","java_package":"com.google.api","objc_class_prefix":"GAPI","cc_enable_arenas":true},"nested":{"http":{"type":"HttpRule","id":72295728,"extend":"google.protobuf.MethodOptions"},"Http":{"fields":{"rules":{"rule":"repeated","type":"HttpRule","id":1},"fullyDecodeReservedExpansion":{"type":"bool","id":2}}},"HttpRule":{"oneofs":{"pattern":{"oneof":["get","put","post","delete","patch","custom"]}},"fields":{"selector":{"type":"string","id":1},"get":{"type":"string","id":2},"put":{"type":"string","id":3},"post":{"type":"string","id":4},"delete":{"type":"string","id":5},"patch":{"type":"string","id":6},"custom":{"type":"CustomHttpPattern","id":8},"body":{"type":"string","id":7},"responseBody":{"type":"string","id":12},"additionalBindings":{"rule":"repeated","type":"HttpRule","id":11}}},"CustomHttpPattern":{"fields":{"kind":{"type":"string","id":1},"path":{"type":"string","id":2}}},"methodSignature":{"rule":"repeated","type":"string","id":1051,"extend":"google.protobuf.MethodOptions"},"defaultHost":{"type":"string","id":1049,"extend":"google.protobuf.ServiceOptions"},"oauthScopes":{"type":"string","id":1050,"extend":"google.protobuf.ServiceOptions"}}},"protobuf":{"options":{"go_package":"google.golang.org/protobuf/types/descriptorpb","java_package":"com.google.protobuf","java_outer_classname":"DescriptorProtos","csharp_namespace":"Google.Protobuf.Reflection","objc_class_prefix":"GPB","cc_enable_arenas":true,"optimize_for":"SPEED"},"nested":{"FileDescriptorSet":{"fields":{"file":{"rule":"repeated","type":"FileDescriptorProto","id":1}}},"FileDescriptorProto":{"fields":{"name":{"t
ype":"string","id":1},"package":{"type":"string","id":2},"dependency":{"rule":"repeated","type":"string","id":3},"publicDependency":{"rule":"repeated","type":"int32","id":10,"options":{"packed":false}},"weakDependency":{"rule":"repeated","type":"int32","id":11,"options":{"packed":false}},"messageType":{"rule":"repeated","type":"DescriptorProto","id":4},"enumType":{"rule":"repeated","type":"EnumDescriptorProto","id":5},"service":{"rule":"repeated","type":"ServiceDescriptorProto","id":6},"extension":{"rule":"repeated","type":"FieldDescriptorProto","id":7},"options":{"type":"FileOptions","id":8},"sourceCodeInfo":{"type":"SourceCodeInfo","id":9},"syntax":{"type":"string","id":12}}},"DescriptorProto":{"fields":{"name":{"type":"string","id":1},"field":{"rule":"repeated","type":"FieldDescriptorProto","id":2},"extension":{"rule":"repeated","type":"FieldDescriptorProto","id":6},"nestedType":{"rule":"repeated","type":"DescriptorProto","id":3},"enumType":{"rule":"repeated","type":"EnumDescriptorProto","id":4},"extensionRange":{"rule":"repeated","type":"ExtensionRange","id":5},"oneofDecl":{"rule":"repeated","type":"OneofDescriptorProto","id":8},"options":{"type":"MessageOptions","id":7},"reservedRange":{"rule":"repeated","type":"ReservedRange","id":9},"reservedName":{"rule":"repeated","type":"string","id":10}},"nested":{"ExtensionRange":{"fields":{"start":{"type":"int32","id":1},"end":{"type":"int32","id":2},"options":{"type":"ExtensionRangeOptions","id":3}}},"ReservedRange":{"fields":{"start":{"type":"int32","id":1},"end":{"type":"int32","id":2}}}}},"ExtensionRangeOptions":{"fields":{"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]]},"FieldDescriptorProto":{"fields":{"name":{"type":"string","id":1},"number":{"type":"int32","id":3},"label":{"type":"Label","id":4},"type":{"type":"Type","id":5},"typeName":{"type":"string","id":6},"extendee":{"type":"string","id":2},"defaultValue":{"type":"string","id":7},"oneofIndex":{"type":"int32","id":9},"jsonName":{"type":"string","id":10},"options":{"type":"FieldOptions","id":8},"proto3Optional":{"type":"bool","id":17}},"nested":{"Type":{"values":{"TYPE_DOUBLE":1,"TYPE_FLOAT":2,"TYPE_INT64":3,"TYPE_UINT64":4,"TYPE_INT32":5,"TYPE_FIXED64":6,"TYPE_FIXED32":7,"TYPE_BOOL":8,"TYPE_STRING":9,"TYPE_GROUP":10,"TYPE_MESSAGE":11,"TYPE_BYTES":12,"TYPE_UINT32":13,"TYPE_ENUM":14,"TYPE_SFIXED32":15,"TYPE_SFIXED64":16,"TYPE_SINT32":17,"TYPE_SINT64":18}},"Label":{"values":{"LABEL_OPTIONAL":1,"LABEL_REQUIRED":2,"LABEL_REPEATED":3}}}},"OneofDescriptorProto":{"fields":{"name":{"type":"string","id":1},"options":{"type":"OneofOptions","id":2}}},"EnumDescriptorProto":{"fields":{"name":{"type":"string","id":1},"value":{"rule":"repeated","type":"EnumValueDescriptorProto","id":2},"options":{"type":"EnumOptions","id":3},"reservedRange":{"rule":"repeated","type":"EnumReservedRange","id":4},"reservedName":{"rule":"repeated","type":"string","id":5}},"nested":{"EnumReservedRange":{"fields":{"start":{"type":"int32","id":1},"end":{"type":"int32","id":2}}}}},"EnumValueDescriptorProto":{"fields":{"name":{"type":"string","id":1},"number":{"type":"int32","id":2},"options":{"type":"EnumValueOptions","id":3}}},"ServiceDescriptorProto":{"fields":{"name":{"type":"string","id":1},"method":{"rule":"repeated","type":"MethodDescriptorProto","id":2},"options":{"type":"ServiceOptions","id":3}}},"MethodDescriptorProto":{"fields":{"name":{"type":"string","id":1},"inputType":{"type":"string","id":2},"outputType":{"type":"string","id":3},"options":{"type
":"MethodOptions","id":4},"clientStreaming":{"type":"bool","id":5,"options":{"default":false}},"serverStreaming":{"type":"bool","id":6,"options":{"default":false}}}},"FileOptions":{"fields":{"javaPackage":{"type":"string","id":1},"javaOuterClassname":{"type":"string","id":8},"javaMultipleFiles":{"type":"bool","id":10,"options":{"default":false}},"javaGenerateEqualsAndHash":{"type":"bool","id":20,"options":{"deprecated":true}},"javaStringCheckUtf8":{"type":"bool","id":27,"options":{"default":false}},"optimizeFor":{"type":"OptimizeMode","id":9,"options":{"default":"SPEED"}},"goPackage":{"type":"string","id":11},"ccGenericServices":{"type":"bool","id":16,"options":{"default":false}},"javaGenericServices":{"type":"bool","id":17,"options":{"default":false}},"pyGenericServices":{"type":"bool","id":18,"options":{"default":false}},"phpGenericServices":{"type":"bool","id":42,"options":{"default":false}},"deprecated":{"type":"bool","id":23,"options":{"default":false}},"ccEnableArenas":{"type":"bool","id":31,"options":{"default":true}},"objcClassPrefix":{"type":"string","id":36},"csharpNamespace":{"type":"string","id":37},"swiftPrefix":{"type":"string","id":39},"phpClassPrefix":{"type":"string","id":40},"phpNamespace":{"type":"string","id":41},"phpMetadataNamespace":{"type":"string","id":44},"rubyPackage":{"type":"string","id":45},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"reserved":[[38,38]],"nested":{"OptimizeMode":{"values":{"SPEED":1,"CODE_SIZE":2,"LITE_RUNTIME":3}}}},"MessageOptions":{"fields":{"messageSetWireFormat":{"type":"bool","id":1,"options":{"default":false}},"noStandardDescriptorAccessor":{"type":"bool","id":2,"options":{"default":false}},"deprecated":{"type":"bool","id":3,"options":{"default":false}},"mapEntry":{"type":"bool","id":7},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"reserved":[[8,8],[9,9]]},"FieldOptions":{"fields":{"ctype":{"type":"CType","id":1,"options":{"default":"STRING"}},"packed":{"type":"bool","id":2},"jstype":{"type":"JSType","id":6,"options":{"default":"JS_NORMAL"}},"lazy":{"type":"bool","id":5,"options":{"default":false}},"deprecated":{"type":"bool","id":3,"options":{"default":false}},"weak":{"type":"bool","id":10,"options":{"default":false}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"reserved":[[4,4]],"nested":{"CType":{"values":{"STRING":0,"CORD":1,"STRING_PIECE":2}},"JSType":{"values":{"JS_NORMAL":0,"JS_STRING":1,"JS_NUMBER":2}}}},"OneofOptions":{"fields":{"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]]},"EnumOptions":{"fields":{"allowAlias":{"type":"bool","id":2},"deprecated":{"type":"bool","id":3,"options":{"default":false}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"reserved":[[5,5]]},"EnumValueOptions":{"fields":{"deprecated":{"type":"bool","id":1,"options":{"default":false}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]]},"ServiceOptions":{"fields":{"deprecated":{"type":"bool","id":33,"options":{"default":false}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]]},"MethodOptions":{"fields":{"deprecated":{"type":"bool","id":33,"options":{"default":false}},"idempotencyLevel":{"type":"IdempotencyLeve
l","id":34,"options":{"default":"IDEMPOTENCY_UNKNOWN"}},"uninterpretedOption":{"rule":"repeated","type":"UninterpretedOption","id":999}},"extensions":[[1e3,536870911]],"nested":{"IdempotencyLevel":{"values":{"IDEMPOTENCY_UNKNOWN":0,"NO_SIDE_EFFECTS":1,"IDEMPOTENT":2}}}},"UninterpretedOption":{"fields":{"name":{"rule":"repeated","type":"NamePart","id":2},"identifierValue":{"type":"string","id":3},"positiveIntValue":{"type":"uint64","id":4},"negativeIntValue":{"type":"int64","id":5},"doubleValue":{"type":"double","id":6},"stringValue":{"type":"bytes","id":7},"aggregateValue":{"type":"string","id":8}},"nested":{"NamePart":{"fields":{"namePart":{"rule":"required","type":"string","id":1},"isExtension":{"rule":"required","type":"bool","id":2}}}}},"SourceCodeInfo":{"fields":{"location":{"rule":"repeated","type":"Location","id":1}},"nested":{"Location":{"fields":{"path":{"rule":"repeated","type":"int32","id":1},"span":{"rule":"repeated","type":"int32","id":2},"leadingComments":{"type":"string","id":3},"trailingComments":{"type":"string","id":4},"leadingDetachedComments":{"rule":"repeated","type":"string","id":6}}}}},"GeneratedCodeInfo":{"fields":{"annotation":{"rule":"repeated","type":"Annotation","id":1}},"nested":{"Annotation":{"fields":{"path":{"rule":"repeated","type":"int32","id":1},"sourceFile":{"type":"string","id":2},"begin":{"type":"int32","id":3},"end":{"type":"int32","id":4}}}}},"Any":{"fields":{"type_url":{"type":"string","id":1},"value":{"type":"bytes","id":2}}}}}}}}} \ No newline at end of file diff --git a/dist/protos/operations.d.ts b/dist/protos/operations.d.ts new file mode 100644 index 0000000..b76e34b --- /dev/null +++ b/dist/protos/operations.d.ts @@ -0,0 +1,4783 @@ +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import Long = require('long'); +import * as $protobuf from "protobufjs"; +/** Namespace google. */ +export namespace google { + + /** Namespace longrunning. */ + namespace longrunning { + + /** Represents an Operations */ + class Operations extends $protobuf.rpc.Service { + + /** + * Constructs a new Operations service. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new Operations service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): Operations; + + /** + * Calls ListOperations. 
+ * @param request ListOperationsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ListOperationsResponse + */ + public listOperations(request: google.longrunning.IListOperationsRequest, callback: google.longrunning.Operations.ListOperationsCallback): void; + + /** + * Calls ListOperations. + * @param request ListOperationsRequest message or plain object + * @returns Promise + */ + public listOperations(request: google.longrunning.IListOperationsRequest): Promise; + + /** + * Calls GetOperation. + * @param request GetOperationRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Operation + */ + public getOperation(request: google.longrunning.IGetOperationRequest, callback: google.longrunning.Operations.GetOperationCallback): void; + + /** + * Calls GetOperation. + * @param request GetOperationRequest message or plain object + * @returns Promise + */ + public getOperation(request: google.longrunning.IGetOperationRequest): Promise; + + /** + * Calls DeleteOperation. + * @param request DeleteOperationRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Empty + */ + public deleteOperation(request: google.longrunning.IDeleteOperationRequest, callback: google.longrunning.Operations.DeleteOperationCallback): void; + + /** + * Calls DeleteOperation. + * @param request DeleteOperationRequest message or plain object + * @returns Promise + */ + public deleteOperation(request: google.longrunning.IDeleteOperationRequest): Promise; + + /** + * Calls CancelOperation. + * @param request CancelOperationRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Empty + */ + public cancelOperation(request: google.longrunning.ICancelOperationRequest, callback: google.longrunning.Operations.CancelOperationCallback): void; + + /** + * Calls CancelOperation. + * @param request CancelOperationRequest message or plain object + * @returns Promise + */ + public cancelOperation(request: google.longrunning.ICancelOperationRequest): Promise; + + /** + * Calls WaitOperation. + * @param request WaitOperationRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Operation + */ + public waitOperation(request: google.longrunning.IWaitOperationRequest, callback: google.longrunning.Operations.WaitOperationCallback): void; + + /** + * Calls WaitOperation. + * @param request WaitOperationRequest message or plain object + * @returns Promise + */ + public waitOperation(request: google.longrunning.IWaitOperationRequest): Promise; + } + + namespace Operations { + + /** + * Callback as used by {@link google.longrunning.Operations#listOperations}. + * @param error Error, if any + * @param [response] ListOperationsResponse + */ + type ListOperationsCallback = (error: (Error|null), response?: google.longrunning.ListOperationsResponse) => void; + + /** + * Callback as used by {@link google.longrunning.Operations#getOperation}. + * @param error Error, if any + * @param [response] Operation + */ + type GetOperationCallback = (error: (Error|null), response?: google.longrunning.Operation) => void; + + /** + * Callback as used by {@link google.longrunning.Operations#deleteOperation}. 
+ * @param error Error, if any + * @param [response] Empty + */ + type DeleteOperationCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; + + /** + * Callback as used by {@link google.longrunning.Operations#cancelOperation}. + * @param error Error, if any + * @param [response] Empty + */ + type CancelOperationCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; + + /** + * Callback as used by {@link google.longrunning.Operations#waitOperation}. + * @param error Error, if any + * @param [response] Operation + */ + type WaitOperationCallback = (error: (Error|null), response?: google.longrunning.Operation) => void; + } + + /** Properties of an Operation. */ + interface IOperation { + + /** Operation name */ + name?: (string|null); + + /** Operation metadata */ + metadata?: (google.protobuf.IAny|null); + + /** Operation done */ + done?: (boolean|null); + + /** Operation error */ + error?: (google.rpc.IStatus|null); + + /** Operation response */ + response?: (google.protobuf.IAny|null); + } + + /** Represents an Operation. */ + class Operation implements IOperation { + + /** + * Constructs a new Operation. + * @param [properties] Properties to set + */ + constructor(properties?: google.longrunning.IOperation); + + /** Operation name. */ + public name: string; + + /** Operation metadata. */ + public metadata?: (google.protobuf.IAny|null); + + /** Operation done. */ + public done: boolean; + + /** Operation error. */ + public error?: (google.rpc.IStatus|null); + + /** Operation response. */ + public response?: (google.protobuf.IAny|null); + + /** Operation result. */ + public result?: ("error"|"response"); + + /** + * Creates a new Operation instance using the specified properties. + * @param [properties] Properties to set + * @returns Operation instance + */ + public static create(properties?: google.longrunning.IOperation): google.longrunning.Operation; + + /** + * Encodes the specified Operation message. Does not implicitly {@link google.longrunning.Operation.verify|verify} messages. + * @param message Operation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.longrunning.IOperation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Operation message, length delimited. Does not implicitly {@link google.longrunning.Operation.verify|verify} messages. + * @param message Operation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.longrunning.IOperation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Operation message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Operation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.longrunning.Operation; + + /** + * Decodes an Operation message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns Operation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.longrunning.Operation; + + /** + * Verifies an Operation message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Operation message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Operation + */ + public static fromObject(object: { [k: string]: any }): google.longrunning.Operation; + + /** + * Creates a plain object from an Operation message. Also converts values to other types if specified. + * @param message Operation + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.longrunning.Operation, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Operation to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a GetOperationRequest. */ + interface IGetOperationRequest { + + /** GetOperationRequest name */ + name?: (string|null); + } + + /** Represents a GetOperationRequest. */ + class GetOperationRequest implements IGetOperationRequest { + + /** + * Constructs a new GetOperationRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.longrunning.IGetOperationRequest); + + /** GetOperationRequest name. */ + public name: string; + + /** + * Creates a new GetOperationRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns GetOperationRequest instance + */ + public static create(properties?: google.longrunning.IGetOperationRequest): google.longrunning.GetOperationRequest; + + /** + * Encodes the specified GetOperationRequest message. Does not implicitly {@link google.longrunning.GetOperationRequest.verify|verify} messages. + * @param message GetOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.longrunning.IGetOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GetOperationRequest message, length delimited. Does not implicitly {@link google.longrunning.GetOperationRequest.verify|verify} messages. + * @param message GetOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.longrunning.IGetOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetOperationRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.longrunning.GetOperationRequest; + + /** + * Decodes a GetOperationRequest message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns GetOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.longrunning.GetOperationRequest; + + /** + * Verifies a GetOperationRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GetOperationRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GetOperationRequest + */ + public static fromObject(object: { [k: string]: any }): google.longrunning.GetOperationRequest; + + /** + * Creates a plain object from a GetOperationRequest message. Also converts values to other types if specified. + * @param message GetOperationRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.longrunning.GetOperationRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GetOperationRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ListOperationsRequest. */ + interface IListOperationsRequest { + + /** ListOperationsRequest name */ + name?: (string|null); + + /** ListOperationsRequest filter */ + filter?: (string|null); + + /** ListOperationsRequest pageSize */ + pageSize?: (number|null); + + /** ListOperationsRequest pageToken */ + pageToken?: (string|null); + } + + /** Represents a ListOperationsRequest. */ + class ListOperationsRequest implements IListOperationsRequest { + + /** + * Constructs a new ListOperationsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.longrunning.IListOperationsRequest); + + /** ListOperationsRequest name. */ + public name: string; + + /** ListOperationsRequest filter. */ + public filter: string; + + /** ListOperationsRequest pageSize. */ + public pageSize: number; + + /** ListOperationsRequest pageToken. */ + public pageToken: string; + + /** + * Creates a new ListOperationsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns ListOperationsRequest instance + */ + public static create(properties?: google.longrunning.IListOperationsRequest): google.longrunning.ListOperationsRequest; + + /** + * Encodes the specified ListOperationsRequest message. Does not implicitly {@link google.longrunning.ListOperationsRequest.verify|verify} messages. + * @param message ListOperationsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.longrunning.IListOperationsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ListOperationsRequest message, length delimited. Does not implicitly {@link google.longrunning.ListOperationsRequest.verify|verify} messages. + * @param message ListOperationsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.longrunning.IListOperationsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ListOperationsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ListOperationsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.longrunning.ListOperationsRequest; + + /** + * Decodes a ListOperationsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ListOperationsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.longrunning.ListOperationsRequest; + + /** + * Verifies a ListOperationsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ListOperationsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ListOperationsRequest + */ + public static fromObject(object: { [k: string]: any }): google.longrunning.ListOperationsRequest; + + /** + * Creates a plain object from a ListOperationsRequest message. Also converts values to other types if specified. + * @param message ListOperationsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.longrunning.ListOperationsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ListOperationsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ListOperationsResponse. */ + interface IListOperationsResponse { + + /** ListOperationsResponse operations */ + operations?: (google.longrunning.IOperation[]|null); + + /** ListOperationsResponse nextPageToken */ + nextPageToken?: (string|null); + } + + /** Represents a ListOperationsResponse. */ + class ListOperationsResponse implements IListOperationsResponse { + + /** + * Constructs a new ListOperationsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.longrunning.IListOperationsResponse); + + /** ListOperationsResponse operations. */ + public operations: google.longrunning.IOperation[]; + + /** ListOperationsResponse nextPageToken. */ + public nextPageToken: string; + + /** + * Creates a new ListOperationsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns ListOperationsResponse instance + */ + public static create(properties?: google.longrunning.IListOperationsResponse): google.longrunning.ListOperationsResponse; + + /** + * Encodes the specified ListOperationsResponse message. Does not implicitly {@link google.longrunning.ListOperationsResponse.verify|verify} messages. + * @param message ListOperationsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.longrunning.IListOperationsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ListOperationsResponse message, length delimited. Does not implicitly {@link google.longrunning.ListOperationsResponse.verify|verify} messages. 
+ * @param message ListOperationsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.longrunning.IListOperationsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ListOperationsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ListOperationsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.longrunning.ListOperationsResponse; + + /** + * Decodes a ListOperationsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ListOperationsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.longrunning.ListOperationsResponse; + + /** + * Verifies a ListOperationsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ListOperationsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ListOperationsResponse + */ + public static fromObject(object: { [k: string]: any }): google.longrunning.ListOperationsResponse; + + /** + * Creates a plain object from a ListOperationsResponse message. Also converts values to other types if specified. + * @param message ListOperationsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.longrunning.ListOperationsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ListOperationsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a CancelOperationRequest. */ + interface ICancelOperationRequest { + + /** CancelOperationRequest name */ + name?: (string|null); + } + + /** Represents a CancelOperationRequest. */ + class CancelOperationRequest implements ICancelOperationRequest { + + /** + * Constructs a new CancelOperationRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.longrunning.ICancelOperationRequest); + + /** CancelOperationRequest name. */ + public name: string; + + /** + * Creates a new CancelOperationRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns CancelOperationRequest instance + */ + public static create(properties?: google.longrunning.ICancelOperationRequest): google.longrunning.CancelOperationRequest; + + /** + * Encodes the specified CancelOperationRequest message. Does not implicitly {@link google.longrunning.CancelOperationRequest.verify|verify} messages. 
+ * @param message CancelOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.longrunning.ICancelOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CancelOperationRequest message, length delimited. Does not implicitly {@link google.longrunning.CancelOperationRequest.verify|verify} messages. + * @param message CancelOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.longrunning.ICancelOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CancelOperationRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CancelOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.longrunning.CancelOperationRequest; + + /** + * Decodes a CancelOperationRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CancelOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.longrunning.CancelOperationRequest; + + /** + * Verifies a CancelOperationRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CancelOperationRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CancelOperationRequest + */ + public static fromObject(object: { [k: string]: any }): google.longrunning.CancelOperationRequest; + + /** + * Creates a plain object from a CancelOperationRequest message. Also converts values to other types if specified. + * @param message CancelOperationRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.longrunning.CancelOperationRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CancelOperationRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a DeleteOperationRequest. */ + interface IDeleteOperationRequest { + + /** DeleteOperationRequest name */ + name?: (string|null); + } + + /** Represents a DeleteOperationRequest. */ + class DeleteOperationRequest implements IDeleteOperationRequest { + + /** + * Constructs a new DeleteOperationRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.longrunning.IDeleteOperationRequest); + + /** DeleteOperationRequest name. */ + public name: string; + + /** + * Creates a new DeleteOperationRequest instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns DeleteOperationRequest instance + */ + public static create(properties?: google.longrunning.IDeleteOperationRequest): google.longrunning.DeleteOperationRequest; + + /** + * Encodes the specified DeleteOperationRequest message. Does not implicitly {@link google.longrunning.DeleteOperationRequest.verify|verify} messages. + * @param message DeleteOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.longrunning.IDeleteOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DeleteOperationRequest message, length delimited. Does not implicitly {@link google.longrunning.DeleteOperationRequest.verify|verify} messages. + * @param message DeleteOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.longrunning.IDeleteOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DeleteOperationRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DeleteOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.longrunning.DeleteOperationRequest; + + /** + * Decodes a DeleteOperationRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DeleteOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.longrunning.DeleteOperationRequest; + + /** + * Verifies a DeleteOperationRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DeleteOperationRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DeleteOperationRequest + */ + public static fromObject(object: { [k: string]: any }): google.longrunning.DeleteOperationRequest; + + /** + * Creates a plain object from a DeleteOperationRequest message. Also converts values to other types if specified. + * @param message DeleteOperationRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.longrunning.DeleteOperationRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DeleteOperationRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a WaitOperationRequest. */ + interface IWaitOperationRequest { + + /** WaitOperationRequest name */ + name?: (string|null); + + /** WaitOperationRequest timeout */ + timeout?: (google.protobuf.IDuration|null); + } + + /** Represents a WaitOperationRequest. */ + class WaitOperationRequest implements IWaitOperationRequest { + + /** + * Constructs a new WaitOperationRequest. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.longrunning.IWaitOperationRequest); + + /** WaitOperationRequest name. */ + public name: string; + + /** WaitOperationRequest timeout. */ + public timeout?: (google.protobuf.IDuration|null); + + /** + * Creates a new WaitOperationRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns WaitOperationRequest instance + */ + public static create(properties?: google.longrunning.IWaitOperationRequest): google.longrunning.WaitOperationRequest; + + /** + * Encodes the specified WaitOperationRequest message. Does not implicitly {@link google.longrunning.WaitOperationRequest.verify|verify} messages. + * @param message WaitOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.longrunning.IWaitOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified WaitOperationRequest message, length delimited. Does not implicitly {@link google.longrunning.WaitOperationRequest.verify|verify} messages. + * @param message WaitOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.longrunning.IWaitOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a WaitOperationRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns WaitOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.longrunning.WaitOperationRequest; + + /** + * Decodes a WaitOperationRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns WaitOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.longrunning.WaitOperationRequest; + + /** + * Verifies a WaitOperationRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a WaitOperationRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns WaitOperationRequest + */ + public static fromObject(object: { [k: string]: any }): google.longrunning.WaitOperationRequest; + + /** + * Creates a plain object from a WaitOperationRequest message. Also converts values to other types if specified. + * @param message WaitOperationRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.longrunning.WaitOperationRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this WaitOperationRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an OperationInfo. 
*/ + interface IOperationInfo { + + /** OperationInfo responseType */ + responseType?: (string|null); + + /** OperationInfo metadataType */ + metadataType?: (string|null); + } + + /** Represents an OperationInfo. */ + class OperationInfo implements IOperationInfo { + + /** + * Constructs a new OperationInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.longrunning.IOperationInfo); + + /** OperationInfo responseType. */ + public responseType: string; + + /** OperationInfo metadataType. */ + public metadataType: string; + + /** + * Creates a new OperationInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns OperationInfo instance + */ + public static create(properties?: google.longrunning.IOperationInfo): google.longrunning.OperationInfo; + + /** + * Encodes the specified OperationInfo message. Does not implicitly {@link google.longrunning.OperationInfo.verify|verify} messages. + * @param message OperationInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.longrunning.IOperationInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OperationInfo message, length delimited. Does not implicitly {@link google.longrunning.OperationInfo.verify|verify} messages. + * @param message OperationInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.longrunning.IOperationInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OperationInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OperationInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.longrunning.OperationInfo; + + /** + * Decodes an OperationInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OperationInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.longrunning.OperationInfo; + + /** + * Verifies an OperationInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OperationInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OperationInfo + */ + public static fromObject(object: { [k: string]: any }): google.longrunning.OperationInfo; + + /** + * Creates a plain object from an OperationInfo message. Also converts values to other types if specified. + * @param message OperationInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.longrunning.OperationInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OperationInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Namespace api. 
*/ + namespace api { + + /** Properties of a Http. */ + interface IHttp { + + /** Http rules */ + rules?: (google.api.IHttpRule[]|null); + + /** Http fullyDecodeReservedExpansion */ + fullyDecodeReservedExpansion?: (boolean|null); + } + + /** Represents a Http. */ + class Http implements IHttp { + + /** + * Constructs a new Http. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttp); + + /** Http rules. */ + public rules: google.api.IHttpRule[]; + + /** Http fullyDecodeReservedExpansion. */ + public fullyDecodeReservedExpansion: boolean; + + /** + * Creates a new Http instance using the specified properties. + * @param [properties] Properties to set + * @returns Http instance + */ + public static create(properties?: google.api.IHttp): google.api.Http; + + /** + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Http message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Http; + + /** + * Decodes a Http message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Http; + + /** + * Verifies a Http message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Http message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Http + */ + public static fromObject(object: { [k: string]: any }): google.api.Http; + + /** + * Creates a plain object from a Http message. Also converts values to other types if specified. + * @param message Http + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.Http, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Http to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a HttpRule. 
*/ + interface IHttpRule { + + /** HttpRule selector */ + selector?: (string|null); + + /** HttpRule get */ + get?: (string|null); + + /** HttpRule put */ + put?: (string|null); + + /** HttpRule post */ + post?: (string|null); + + /** HttpRule delete */ + "delete"?: (string|null); + + /** HttpRule patch */ + patch?: (string|null); + + /** HttpRule custom */ + custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body */ + body?: (string|null); + + /** HttpRule responseBody */ + responseBody?: (string|null); + + /** HttpRule additionalBindings */ + additionalBindings?: (google.api.IHttpRule[]|null); + } + + /** Represents a HttpRule. */ + class HttpRule implements IHttpRule { + + /** + * Constructs a new HttpRule. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttpRule); + + /** HttpRule selector. */ + public selector: string; + + /** HttpRule get. */ + public get: string; + + /** HttpRule put. */ + public put: string; + + /** HttpRule post. */ + public post: string; + + /** HttpRule delete. */ + public delete: string; + + /** HttpRule patch. */ + public patch: string; + + /** HttpRule custom. */ + public custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body. */ + public body: string; + + /** HttpRule responseBody. */ + public responseBody: string; + + /** HttpRule additionalBindings. */ + public additionalBindings: google.api.IHttpRule[]; + + /** HttpRule pattern. */ + public pattern?: ("get"|"put"|"post"|"delete"|"patch"|"custom"); + + /** + * Creates a new HttpRule instance using the specified properties. + * @param [properties] Properties to set + * @returns HttpRule instance + */ + public static create(properties?: google.api.IHttpRule): google.api.HttpRule; + + /** + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a HttpRule message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.HttpRule; + + /** + * Decodes a HttpRule message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.HttpRule; + + /** + * Verifies a HttpRule message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns HttpRule + */ + public static fromObject(object: { [k: string]: any }): google.api.HttpRule; + + /** + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @param message HttpRule + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.HttpRule, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this HttpRule to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a CustomHttpPattern. */ + interface ICustomHttpPattern { + + /** CustomHttpPattern kind */ + kind?: (string|null); + + /** CustomHttpPattern path */ + path?: (string|null); + } + + /** Represents a CustomHttpPattern. */ + class CustomHttpPattern implements ICustomHttpPattern { + + /** + * Constructs a new CustomHttpPattern. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ICustomHttpPattern); + + /** CustomHttpPattern kind. */ + public kind: string; + + /** CustomHttpPattern path. */ + public path: string; + + /** + * Creates a new CustomHttpPattern instance using the specified properties. + * @param [properties] Properties to set + * @returns CustomHttpPattern instance + */ + public static create(properties?: google.api.ICustomHttpPattern): google.api.CustomHttpPattern; + + /** + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CustomHttpPattern; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CustomHttpPattern; + + /** + * Verifies a CustomHttpPattern message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CustomHttpPattern + */ + public static fromObject(object: { [k: string]: any }): google.api.CustomHttpPattern; + + /** + * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * @param message CustomHttpPattern + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.CustomHttpPattern, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CustomHttpPattern to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Namespace protobuf. */ + namespace protobuf { + + /** Properties of a FileDescriptorSet. */ + interface IFileDescriptorSet { + + /** FileDescriptorSet file */ + file?: (google.protobuf.IFileDescriptorProto[]|null); + } + + /** Represents a FileDescriptorSet. */ + class FileDescriptorSet implements IFileDescriptorSet { + + /** + * Constructs a new FileDescriptorSet. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorSet); + + /** FileDescriptorSet file. */ + public file: google.protobuf.IFileDescriptorProto[]; + + /** + * Creates a new FileDescriptorSet instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorSet instance + */ + public static create(properties?: google.protobuf.IFileDescriptorSet): google.protobuf.FileDescriptorSet; + + /** + * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorSet; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorSet; + + /** + * Verifies a FileDescriptorSet message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorSet + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorSet; + + /** + * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. + * @param message FileDescriptorSet + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorSet, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorSet to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileDescriptorProto. */ + interface IFileDescriptorProto { + + /** FileDescriptorProto name */ + name?: (string|null); + + /** FileDescriptorProto package */ + "package"?: (string|null); + + /** FileDescriptorProto dependency */ + dependency?: (string[]|null); + + /** FileDescriptorProto publicDependency */ + publicDependency?: (number[]|null); + + /** FileDescriptorProto weakDependency */ + weakDependency?: (number[]|null); + + /** FileDescriptorProto messageType */ + messageType?: (google.protobuf.IDescriptorProto[]|null); + + /** FileDescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** FileDescriptorProto service */ + service?: (google.protobuf.IServiceDescriptorProto[]|null); + + /** FileDescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** FileDescriptorProto options */ + options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo */ + sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax */ + syntax?: (string|null); + } + + /** Represents a FileDescriptorProto. */ + class FileDescriptorProto implements IFileDescriptorProto { + + /** + * Constructs a new FileDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorProto); + + /** FileDescriptorProto name. */ + public name: string; + + /** FileDescriptorProto package. */ + public package: string; + + /** FileDescriptorProto dependency. */ + public dependency: string[]; + + /** FileDescriptorProto publicDependency. */ + public publicDependency: number[]; + + /** FileDescriptorProto weakDependency. */ + public weakDependency: number[]; + + /** FileDescriptorProto messageType. */ + public messageType: google.protobuf.IDescriptorProto[]; + + /** FileDescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** FileDescriptorProto service. */ + public service: google.protobuf.IServiceDescriptorProto[]; + + /** FileDescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** FileDescriptorProto options. 
*/ + public options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo. */ + public sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax. */ + public syntax: string; + + /** + * Creates a new FileDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFileDescriptorProto): google.protobuf.FileDescriptorProto; + + /** + * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorProto; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorProto; + + /** + * Verifies a FileDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorProto; + + /** + * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. + * @param message FileDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a DescriptorProto. 
*/ + interface IDescriptorProto { + + /** DescriptorProto name */ + name?: (string|null); + + /** DescriptorProto field */ + field?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto nestedType */ + nestedType?: (google.protobuf.IDescriptorProto[]|null); + + /** DescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** DescriptorProto extensionRange */ + extensionRange?: (google.protobuf.DescriptorProto.IExtensionRange[]|null); + + /** DescriptorProto oneofDecl */ + oneofDecl?: (google.protobuf.IOneofDescriptorProto[]|null); + + /** DescriptorProto options */ + options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange */ + reservedRange?: (google.protobuf.DescriptorProto.IReservedRange[]|null); + + /** DescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents a DescriptorProto. */ + class DescriptorProto implements IDescriptorProto { + + /** + * Constructs a new DescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IDescriptorProto); + + /** DescriptorProto name. */ + public name: string; + + /** DescriptorProto field. */ + public field: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto nestedType. */ + public nestedType: google.protobuf.IDescriptorProto[]; + + /** DescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** DescriptorProto extensionRange. */ + public extensionRange: google.protobuf.DescriptorProto.IExtensionRange[]; + + /** DescriptorProto oneofDecl. */ + public oneofDecl: google.protobuf.IOneofDescriptorProto[]; + + /** DescriptorProto options. */ + public options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange. */ + public reservedRange: google.protobuf.DescriptorProto.IReservedRange[]; + + /** DescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new DescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns DescriptorProto instance + */ + public static create(properties?: google.protobuf.IDescriptorProto): google.protobuf.DescriptorProto; + + /** + * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto; + + /** + * Verifies a DescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto; + + /** + * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. + * @param message DescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace DescriptorProto { + + /** Properties of an ExtensionRange. */ + interface IExtensionRange { + + /** ExtensionRange start */ + start?: (number|null); + + /** ExtensionRange end */ + end?: (number|null); + + /** ExtensionRange options */ + options?: (google.protobuf.IExtensionRangeOptions|null); + } + + /** Represents an ExtensionRange. */ + class ExtensionRange implements IExtensionRange { + + /** + * Constructs a new ExtensionRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IExtensionRange); + + /** ExtensionRange start. */ + public start: number; + + /** ExtensionRange end. */ + public end: number; + + /** ExtensionRange options. */ + public options?: (google.protobuf.IExtensionRangeOptions|null); + + /** + * Creates a new ExtensionRange instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IExtensionRange): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. 
+ * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Verifies an ExtensionRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. + * @param message ExtensionRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ExtensionRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ReservedRange. */ + interface IReservedRange { + + /** ReservedRange start */ + start?: (number|null); + + /** ReservedRange end */ + end?: (number|null); + } + + /** Represents a ReservedRange. */ + class ReservedRange implements IReservedRange { + + /** + * Constructs a new ReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IReservedRange); + + /** ReservedRange start. */ + public start: number; + + /** ReservedRange end. */ + public end: number; + + /** + * Creates a new ReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns ReservedRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IReservedRange): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReservedRange message, length delimited. 
Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Decodes a ReservedRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Verifies a ReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. + * @param message ReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an ExtensionRangeOptions. */ + interface IExtensionRangeOptions { + + /** ExtensionRangeOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an ExtensionRangeOptions. */ + class ExtensionRangeOptions implements IExtensionRangeOptions { + + /** + * Constructs a new ExtensionRangeOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IExtensionRangeOptions); + + /** ExtensionRangeOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ExtensionRangeOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRangeOptions instance + */ + public static create(properties?: google.protobuf.IExtensionRangeOptions): google.protobuf.ExtensionRangeOptions; + + /** + * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. 
+ * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ExtensionRangeOptions; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ExtensionRangeOptions; + + /** + * Verifies an ExtensionRangeOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRangeOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ExtensionRangeOptions; + + /** + * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. + * @param message ExtensionRangeOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ExtensionRangeOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRangeOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FieldDescriptorProto. 
*/ + interface IFieldDescriptorProto { + + /** FieldDescriptorProto name */ + name?: (string|null); + + /** FieldDescriptorProto number */ + number?: (number|null); + + /** FieldDescriptorProto label */ + label?: (google.protobuf.FieldDescriptorProto.Label|null); + + /** FieldDescriptorProto type */ + type?: (google.protobuf.FieldDescriptorProto.Type|null); + + /** FieldDescriptorProto typeName */ + typeName?: (string|null); + + /** FieldDescriptorProto extendee */ + extendee?: (string|null); + + /** FieldDescriptorProto defaultValue */ + defaultValue?: (string|null); + + /** FieldDescriptorProto oneofIndex */ + oneofIndex?: (number|null); + + /** FieldDescriptorProto jsonName */ + jsonName?: (string|null); + + /** FieldDescriptorProto options */ + options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional */ + proto3Optional?: (boolean|null); + } + + /** Represents a FieldDescriptorProto. */ + class FieldDescriptorProto implements IFieldDescriptorProto { + + /** + * Constructs a new FieldDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldDescriptorProto); + + /** FieldDescriptorProto name. */ + public name: string; + + /** FieldDescriptorProto number. */ + public number: number; + + /** FieldDescriptorProto label. */ + public label: google.protobuf.FieldDescriptorProto.Label; + + /** FieldDescriptorProto type. */ + public type: google.protobuf.FieldDescriptorProto.Type; + + /** FieldDescriptorProto typeName. */ + public typeName: string; + + /** FieldDescriptorProto extendee. */ + public extendee: string; + + /** FieldDescriptorProto defaultValue. */ + public defaultValue: string; + + /** FieldDescriptorProto oneofIndex. */ + public oneofIndex: number; + + /** FieldDescriptorProto jsonName. */ + public jsonName: string; + + /** FieldDescriptorProto options. */ + public options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional. */ + public proto3Optional: boolean; + + /** + * Creates a new FieldDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFieldDescriptorProto): google.protobuf.FieldDescriptorProto; + + /** + * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldDescriptorProto; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldDescriptorProto; + + /** + * Verifies a FieldDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldDescriptorProto; + + /** + * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. + * @param message FieldDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FieldDescriptorProto { + + /** Type enum. */ + enum Type { + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + TYPE_SINT32 = 17, + TYPE_SINT64 = 18 + } + + /** Label enum. */ + enum Label { + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3 + } + } + + /** Properties of an OneofDescriptorProto. */ + interface IOneofDescriptorProto { + + /** OneofDescriptorProto name */ + name?: (string|null); + + /** OneofDescriptorProto options */ + options?: (google.protobuf.IOneofOptions|null); + } + + /** Represents an OneofDescriptorProto. */ + class OneofDescriptorProto implements IOneofDescriptorProto { + + /** + * Constructs a new OneofDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofDescriptorProto); + + /** OneofDescriptorProto name. */ + public name: string; + + /** OneofDescriptorProto options. */ + public options?: (google.protobuf.IOneofOptions|null); + + /** + * Creates a new OneofDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofDescriptorProto instance + */ + public static create(properties?: google.protobuf.IOneofDescriptorProto): google.protobuf.OneofDescriptorProto; + + /** + * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. 
+ * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofDescriptorProto; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofDescriptorProto; + + /** + * Verifies an OneofDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OneofDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofDescriptorProto; + + /** + * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. + * @param message OneofDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumDescriptorProto. */ + interface IEnumDescriptorProto { + + /** EnumDescriptorProto name */ + name?: (string|null); + + /** EnumDescriptorProto value */ + value?: (google.protobuf.IEnumValueDescriptorProto[]|null); + + /** EnumDescriptorProto options */ + options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange */ + reservedRange?: (google.protobuf.EnumDescriptorProto.IEnumReservedRange[]|null); + + /** EnumDescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents an EnumDescriptorProto. */ + class EnumDescriptorProto implements IEnumDescriptorProto { + + /** + * Constructs a new EnumDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumDescriptorProto); + + /** EnumDescriptorProto name. 
*/ + public name: string; + + /** EnumDescriptorProto value. */ + public value: google.protobuf.IEnumValueDescriptorProto[]; + + /** EnumDescriptorProto options. */ + public options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange. */ + public reservedRange: google.protobuf.EnumDescriptorProto.IEnumReservedRange[]; + + /** EnumDescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new EnumDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumDescriptorProto): google.protobuf.EnumDescriptorProto; + + /** + * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto; + + /** + * Verifies an EnumDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto; + + /** + * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. + * @param message EnumDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumDescriptorProto to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace EnumDescriptorProto { + + /** Properties of an EnumReservedRange. */ + interface IEnumReservedRange { + + /** EnumReservedRange start */ + start?: (number|null); + + /** EnumReservedRange end */ + end?: (number|null); + } + + /** Represents an EnumReservedRange. */ + class EnumReservedRange implements IEnumReservedRange { + + /** + * Constructs a new EnumReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange); + + /** EnumReservedRange start. */ + public start: number; + + /** EnumReservedRange end. */ + public end: number; + + /** + * Creates a new EnumReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumReservedRange instance + */ + public static create(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Verifies an EnumReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. 
+ * @param message EnumReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto.EnumReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an EnumValueDescriptorProto. */ + interface IEnumValueDescriptorProto { + + /** EnumValueDescriptorProto name */ + name?: (string|null); + + /** EnumValueDescriptorProto number */ + number?: (number|null); + + /** EnumValueDescriptorProto options */ + options?: (google.protobuf.IEnumValueOptions|null); + } + + /** Represents an EnumValueDescriptorProto. */ + class EnumValueDescriptorProto implements IEnumValueDescriptorProto { + + /** + * Constructs a new EnumValueDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueDescriptorProto); + + /** EnumValueDescriptorProto name. */ + public name: string; + + /** EnumValueDescriptorProto number. */ + public number: number; + + /** EnumValueDescriptorProto options. */ + public options?: (google.protobuf.IEnumValueOptions|null); + + /** + * Creates a new EnumValueDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumValueDescriptorProto): google.protobuf.EnumValueDescriptorProto; + + /** + * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueDescriptorProto; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueDescriptorProto; + + /** + * Verifies an EnumValueDescriptorProto message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumValueDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueDescriptorProto; + + /** + * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. + * @param message EnumValueDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ServiceDescriptorProto. */ + interface IServiceDescriptorProto { + + /** ServiceDescriptorProto name */ + name?: (string|null); + + /** ServiceDescriptorProto method */ + method?: (google.protobuf.IMethodDescriptorProto[]|null); + + /** ServiceDescriptorProto options */ + options?: (google.protobuf.IServiceOptions|null); + } + + /** Represents a ServiceDescriptorProto. */ + class ServiceDescriptorProto implements IServiceDescriptorProto { + + /** + * Constructs a new ServiceDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceDescriptorProto); + + /** ServiceDescriptorProto name. */ + public name: string; + + /** ServiceDescriptorProto method. */ + public method: google.protobuf.IMethodDescriptorProto[]; + + /** ServiceDescriptorProto options. */ + public options?: (google.protobuf.IServiceOptions|null); + + /** + * Creates a new ServiceDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns ServiceDescriptorProto instance + */ + public static create(properties?: google.protobuf.IServiceDescriptorProto): google.protobuf.ServiceDescriptorProto; + + /** + * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceDescriptorProto; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceDescriptorProto; + + /** + * Verifies a ServiceDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceDescriptorProto; + + /** + * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. + * @param message ServiceDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MethodDescriptorProto. */ + interface IMethodDescriptorProto { + + /** MethodDescriptorProto name */ + name?: (string|null); + + /** MethodDescriptorProto inputType */ + inputType?: (string|null); + + /** MethodDescriptorProto outputType */ + outputType?: (string|null); + + /** MethodDescriptorProto options */ + options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming */ + clientStreaming?: (boolean|null); + + /** MethodDescriptorProto serverStreaming */ + serverStreaming?: (boolean|null); + } + + /** Represents a MethodDescriptorProto. */ + class MethodDescriptorProto implements IMethodDescriptorProto { + + /** + * Constructs a new MethodDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodDescriptorProto); + + /** MethodDescriptorProto name. */ + public name: string; + + /** MethodDescriptorProto inputType. */ + public inputType: string; + + /** MethodDescriptorProto outputType. */ + public outputType: string; + + /** MethodDescriptorProto options. */ + public options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming. */ + public clientStreaming: boolean; + + /** MethodDescriptorProto serverStreaming. */ + public serverStreaming: boolean; + + /** + * Creates a new MethodDescriptorProto instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns MethodDescriptorProto instance + */ + public static create(properties?: google.protobuf.IMethodDescriptorProto): google.protobuf.MethodDescriptorProto; + + /** + * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodDescriptorProto; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodDescriptorProto; + + /** + * Verifies a MethodDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodDescriptorProto; + + /** + * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. + * @param message MethodDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileOptions. 
*/ + interface IFileOptions { + + /** FileOptions javaPackage */ + javaPackage?: (string|null); + + /** FileOptions javaOuterClassname */ + javaOuterClassname?: (string|null); + + /** FileOptions javaMultipleFiles */ + javaMultipleFiles?: (boolean|null); + + /** FileOptions javaGenerateEqualsAndHash */ + javaGenerateEqualsAndHash?: (boolean|null); + + /** FileOptions javaStringCheckUtf8 */ + javaStringCheckUtf8?: (boolean|null); + + /** FileOptions optimizeFor */ + optimizeFor?: (google.protobuf.FileOptions.OptimizeMode|null); + + /** FileOptions goPackage */ + goPackage?: (string|null); + + /** FileOptions ccGenericServices */ + ccGenericServices?: (boolean|null); + + /** FileOptions javaGenericServices */ + javaGenericServices?: (boolean|null); + + /** FileOptions pyGenericServices */ + pyGenericServices?: (boolean|null); + + /** FileOptions phpGenericServices */ + phpGenericServices?: (boolean|null); + + /** FileOptions deprecated */ + deprecated?: (boolean|null); + + /** FileOptions ccEnableArenas */ + ccEnableArenas?: (boolean|null); + + /** FileOptions objcClassPrefix */ + objcClassPrefix?: (string|null); + + /** FileOptions csharpNamespace */ + csharpNamespace?: (string|null); + + /** FileOptions swiftPrefix */ + swiftPrefix?: (string|null); + + /** FileOptions phpClassPrefix */ + phpClassPrefix?: (string|null); + + /** FileOptions phpNamespace */ + phpNamespace?: (string|null); + + /** FileOptions phpMetadataNamespace */ + phpMetadataNamespace?: (string|null); + + /** FileOptions rubyPackage */ + rubyPackage?: (string|null); + + /** FileOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents a FileOptions. */ + class FileOptions implements IFileOptions { + + /** + * Constructs a new FileOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileOptions); + + /** FileOptions javaPackage. */ + public javaPackage: string; + + /** FileOptions javaOuterClassname. */ + public javaOuterClassname: string; + + /** FileOptions javaMultipleFiles. */ + public javaMultipleFiles: boolean; + + /** FileOptions javaGenerateEqualsAndHash. */ + public javaGenerateEqualsAndHash: boolean; + + /** FileOptions javaStringCheckUtf8. */ + public javaStringCheckUtf8: boolean; + + /** FileOptions optimizeFor. */ + public optimizeFor: google.protobuf.FileOptions.OptimizeMode; + + /** FileOptions goPackage. */ + public goPackage: string; + + /** FileOptions ccGenericServices. */ + public ccGenericServices: boolean; + + /** FileOptions javaGenericServices. */ + public javaGenericServices: boolean; + + /** FileOptions pyGenericServices. */ + public pyGenericServices: boolean; + + /** FileOptions phpGenericServices. */ + public phpGenericServices: boolean; + + /** FileOptions deprecated. */ + public deprecated: boolean; + + /** FileOptions ccEnableArenas. */ + public ccEnableArenas: boolean; + + /** FileOptions objcClassPrefix. */ + public objcClassPrefix: string; + + /** FileOptions csharpNamespace. */ + public csharpNamespace: string; + + /** FileOptions swiftPrefix. */ + public swiftPrefix: string; + + /** FileOptions phpClassPrefix. */ + public phpClassPrefix: string; + + /** FileOptions phpNamespace. */ + public phpNamespace: string; + + /** FileOptions phpMetadataNamespace. */ + public phpMetadataNamespace: string; + + /** FileOptions rubyPackage. */ + public rubyPackage: string; + + /** FileOptions uninterpretedOption. 
*/ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FileOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns FileOptions instance + */ + public static create(properties?: google.protobuf.IFileOptions): google.protobuf.FileOptions; + + /** + * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileOptions; + + /** + * Decodes a FileOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileOptions; + + /** + * Verifies a FileOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileOptions; + + /** + * Creates a plain object from a FileOptions message. Also converts values to other types if specified. + * @param message FileOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FileOptions { + + /** OptimizeMode enum. */ + enum OptimizeMode { + SPEED = 1, + CODE_SIZE = 2, + LITE_RUNTIME = 3 + } + } + + /** Properties of a MessageOptions. 
*/ + interface IMessageOptions { + + /** MessageOptions messageSetWireFormat */ + messageSetWireFormat?: (boolean|null); + + /** MessageOptions noStandardDescriptorAccessor */ + noStandardDescriptorAccessor?: (boolean|null); + + /** MessageOptions deprecated */ + deprecated?: (boolean|null); + + /** MessageOptions mapEntry */ + mapEntry?: (boolean|null); + + /** MessageOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents a MessageOptions. */ + class MessageOptions implements IMessageOptions { + + /** + * Constructs a new MessageOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMessageOptions); + + /** MessageOptions messageSetWireFormat. */ + public messageSetWireFormat: boolean; + + /** MessageOptions noStandardDescriptorAccessor. */ + public noStandardDescriptorAccessor: boolean; + + /** MessageOptions deprecated. */ + public deprecated: boolean; + + /** MessageOptions mapEntry. */ + public mapEntry: boolean; + + /** MessageOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MessageOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns MessageOptions instance + */ + public static create(properties?: google.protobuf.IMessageOptions): google.protobuf.MessageOptions; + + /** + * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MessageOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MessageOptions; + + /** + * Decodes a MessageOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MessageOptions; + + /** + * Verifies a MessageOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns MessageOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MessageOptions; + + /** + * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. + * @param message MessageOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MessageOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MessageOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FieldOptions. */ + interface IFieldOptions { + + /** FieldOptions ctype */ + ctype?: (google.protobuf.FieldOptions.CType|null); + + /** FieldOptions packed */ + packed?: (boolean|null); + + /** FieldOptions jstype */ + jstype?: (google.protobuf.FieldOptions.JSType|null); + + /** FieldOptions lazy */ + lazy?: (boolean|null); + + /** FieldOptions deprecated */ + deprecated?: (boolean|null); + + /** FieldOptions weak */ + weak?: (boolean|null); + + /** FieldOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents a FieldOptions. */ + class FieldOptions implements IFieldOptions { + + /** + * Constructs a new FieldOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldOptions); + + /** FieldOptions ctype. */ + public ctype: google.protobuf.FieldOptions.CType; + + /** FieldOptions packed. */ + public packed: boolean; + + /** FieldOptions jstype. */ + public jstype: google.protobuf.FieldOptions.JSType; + + /** FieldOptions lazy. */ + public lazy: boolean; + + /** FieldOptions deprecated. */ + public deprecated: boolean; + + /** FieldOptions weak. */ + public weak: boolean; + + /** FieldOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FieldOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldOptions instance + */ + public static create(properties?: google.protobuf.IFieldOptions): google.protobuf.FieldOptions; + + /** + * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions; + + /** + * Decodes a FieldOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions; + + /** + * Verifies a FieldOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions; + + /** + * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. + * @param message FieldOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FieldOptions { + + /** CType enum. */ + enum CType { + STRING = 0, + CORD = 1, + STRING_PIECE = 2 + } + + /** JSType enum. */ + enum JSType { + JS_NORMAL = 0, + JS_STRING = 1, + JS_NUMBER = 2 + } + } + + /** Properties of an OneofOptions. */ + interface IOneofOptions { + + /** OneofOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an OneofOptions. */ + class OneofOptions implements IOneofOptions { + + /** + * Constructs a new OneofOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofOptions); + + /** OneofOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new OneofOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofOptions instance + */ + public static create(properties?: google.protobuf.IOneofOptions): google.protobuf.OneofOptions; + + /** + * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
+ * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofOptions; + + /** + * Decodes an OneofOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofOptions; + + /** + * Verifies an OneofOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OneofOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofOptions; + + /** + * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. + * @param message OneofOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumOptions. */ + interface IEnumOptions { + + /** EnumOptions allowAlias */ + allowAlias?: (boolean|null); + + /** EnumOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumOptions. */ + class EnumOptions implements IEnumOptions { + + /** + * Constructs a new EnumOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumOptions); + + /** EnumOptions allowAlias. */ + public allowAlias: boolean; + + /** EnumOptions deprecated. */ + public deprecated: boolean; + + /** EnumOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumOptions instance + */ + public static create(properties?: google.protobuf.IEnumOptions): google.protobuf.EnumOptions; + + /** + * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
+ * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumOptions; + + /** + * Decodes an EnumOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumOptions; + + /** + * Verifies an EnumOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumOptions; + + /** + * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. + * @param message EnumOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumValueOptions. */ + interface IEnumValueOptions { + + /** EnumValueOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumValueOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumValueOptions. */ + class EnumValueOptions implements IEnumValueOptions { + + /** + * Constructs a new EnumValueOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueOptions); + + /** EnumValueOptions deprecated. */ + public deprecated: boolean; + + /** EnumValueOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumValueOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueOptions instance + */ + public static create(properties?: google.protobuf.IEnumValueOptions): google.protobuf.EnumValueOptions; + + /** + * Encodes the specified EnumValueOptions message. 
Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueOptions; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueOptions; + + /** + * Verifies an EnumValueOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumValueOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueOptions; + + /** + * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. + * @param message EnumValueOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ServiceOptions. */ + interface IServiceOptions { + + /** ServiceOptions deprecated */ + deprecated?: (boolean|null); + + /** ServiceOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** ServiceOptions .google.api.defaultHost */ + ".google.api.defaultHost"?: (string|null); + + /** ServiceOptions .google.api.oauthScopes */ + ".google.api.oauthScopes"?: (string|null); + } + + /** Represents a ServiceOptions. */ + class ServiceOptions implements IServiceOptions { + + /** + * Constructs a new ServiceOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceOptions); + + /** ServiceOptions deprecated. */ + public deprecated: boolean; + + /** ServiceOptions uninterpretedOption. 
*/ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ServiceOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ServiceOptions instance + */ + public static create(properties?: google.protobuf.IServiceOptions): google.protobuf.ServiceOptions; + + /** + * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceOptions; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceOptions; + + /** + * Verifies a ServiceOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceOptions; + + /** + * Creates a plain object from a ServiceOptions message. Also converts values to other types if specified. + * @param message ServiceOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MethodOptions. 
*/ + interface IMethodOptions { + + /** MethodOptions deprecated */ + deprecated?: (boolean|null); + + /** MethodOptions idempotencyLevel */ + idempotencyLevel?: (google.protobuf.MethodOptions.IdempotencyLevel|null); + + /** MethodOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** MethodOptions .google.longrunning.operationInfo */ + ".google.longrunning.operationInfo"?: (google.longrunning.IOperationInfo|null); + + /** MethodOptions .google.api.http */ + ".google.api.http"?: (google.api.IHttpRule|null); + + /** MethodOptions .google.api.methodSignature */ + ".google.api.methodSignature"?: (string[]|null); + } + + /** Represents a MethodOptions. */ + class MethodOptions implements IMethodOptions { + + /** + * Constructs a new MethodOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodOptions); + + /** MethodOptions deprecated. */ + public deprecated: boolean; + + /** MethodOptions idempotencyLevel. */ + public idempotencyLevel: google.protobuf.MethodOptions.IdempotencyLevel; + + /** MethodOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MethodOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns MethodOptions instance + */ + public static create(properties?: google.protobuf.IMethodOptions): google.protobuf.MethodOptions; + + /** + * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodOptions; + + /** + * Decodes a MethodOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodOptions; + + /** + * Verifies a MethodOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns MethodOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodOptions; + + /** + * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. + * @param message MethodOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace MethodOptions { + + /** IdempotencyLevel enum. */ + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + NO_SIDE_EFFECTS = 1, + IDEMPOTENT = 2 + } + } + + /** Properties of an UninterpretedOption. */ + interface IUninterpretedOption { + + /** UninterpretedOption name */ + name?: (google.protobuf.UninterpretedOption.INamePart[]|null); + + /** UninterpretedOption identifierValue */ + identifierValue?: (string|null); + + /** UninterpretedOption positiveIntValue */ + positiveIntValue?: (number|Long|null); + + /** UninterpretedOption negativeIntValue */ + negativeIntValue?: (number|Long|null); + + /** UninterpretedOption doubleValue */ + doubleValue?: (number|null); + + /** UninterpretedOption stringValue */ + stringValue?: (Uint8Array|null); + + /** UninterpretedOption aggregateValue */ + aggregateValue?: (string|null); + } + + /** Represents an UninterpretedOption. */ + class UninterpretedOption implements IUninterpretedOption { + + /** + * Constructs a new UninterpretedOption. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IUninterpretedOption); + + /** UninterpretedOption name. */ + public name: google.protobuf.UninterpretedOption.INamePart[]; + + /** UninterpretedOption identifierValue. */ + public identifierValue: string; + + /** UninterpretedOption positiveIntValue. */ + public positiveIntValue: (number|Long); + + /** UninterpretedOption negativeIntValue. */ + public negativeIntValue: (number|Long); + + /** UninterpretedOption doubleValue. */ + public doubleValue: number; + + /** UninterpretedOption stringValue. */ + public stringValue: Uint8Array; + + /** UninterpretedOption aggregateValue. */ + public aggregateValue: string; + + /** + * Creates a new UninterpretedOption instance using the specified properties. + * @param [properties] Properties to set + * @returns UninterpretedOption instance + */ + public static create(properties?: google.protobuf.IUninterpretedOption): google.protobuf.UninterpretedOption; + + /** + * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption; + + /** + * Verifies an UninterpretedOption message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns UninterpretedOption + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption; + + /** + * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. + * @param message UninterpretedOption + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UninterpretedOption to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace UninterpretedOption { + + /** Properties of a NamePart. */ + interface INamePart { + + /** NamePart namePart */ + namePart: string; + + /** NamePart isExtension */ + isExtension: boolean; + } + + /** Represents a NamePart. */ + class NamePart implements INamePart { + + /** + * Constructs a new NamePart. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.UninterpretedOption.INamePart); + + /** NamePart namePart. */ + public namePart: string; + + /** NamePart isExtension. */ + public isExtension: boolean; + + /** + * Creates a new NamePart instance using the specified properties. + * @param [properties] Properties to set + * @returns NamePart instance + */ + public static create(properties?: google.protobuf.UninterpretedOption.INamePart): google.protobuf.UninterpretedOption.NamePart; + + /** + * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. 
+ * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a NamePart message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption.NamePart; + + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption.NamePart; + + /** + * Verifies a NamePart message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a NamePart message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns NamePart + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption.NamePart; + + /** + * Creates a plain object from a NamePart message. Also converts values to other types if specified. + * @param message NamePart + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption.NamePart, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this NamePart to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a SourceCodeInfo. */ + interface ISourceCodeInfo { + + /** SourceCodeInfo location */ + location?: (google.protobuf.SourceCodeInfo.ILocation[]|null); + } + + /** Represents a SourceCodeInfo. */ + class SourceCodeInfo implements ISourceCodeInfo { + + /** + * Constructs a new SourceCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ISourceCodeInfo); + + /** SourceCodeInfo location. */ + public location: google.protobuf.SourceCodeInfo.ILocation[]; + + /** + * Creates a new SourceCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns SourceCodeInfo instance + */ + public static create(properties?: google.protobuf.ISourceCodeInfo): google.protobuf.SourceCodeInfo; + + /** + * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. 
+ * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo; + + /** + * Verifies a SourceCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SourceCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo; + + /** + * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. + * @param message SourceCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SourceCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace SourceCodeInfo { + + /** Properties of a Location. */ + interface ILocation { + + /** Location path */ + path?: (number[]|null); + + /** Location span */ + span?: (number[]|null); + + /** Location leadingComments */ + leadingComments?: (string|null); + + /** Location trailingComments */ + trailingComments?: (string|null); + + /** Location leadingDetachedComments */ + leadingDetachedComments?: (string[]|null); + } + + /** Represents a Location. */ + class Location implements ILocation { + + /** + * Constructs a new Location. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.SourceCodeInfo.ILocation); + + /** Location path. */ + public path: number[]; + + /** Location span. */ + public span: number[]; + + /** Location leadingComments. */ + public leadingComments: string; + + /** Location trailingComments. */ + public trailingComments: string; + + /** Location leadingDetachedComments. */ + public leadingDetachedComments: string[]; + + /** + * Creates a new Location instance using the specified properties. + * @param [properties] Properties to set + * @returns Location instance + */ + public static create(properties?: google.protobuf.SourceCodeInfo.ILocation): google.protobuf.SourceCodeInfo.Location; + + /** + * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
+ * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Location message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo.Location; + + /** + * Decodes a Location message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo.Location; + + /** + * Verifies a Location message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Location + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo.Location; + + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. + * @param message Location + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo.Location, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Location to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a GeneratedCodeInfo. */ + interface IGeneratedCodeInfo { + + /** GeneratedCodeInfo annotation */ + annotation?: (google.protobuf.GeneratedCodeInfo.IAnnotation[]|null); + } + + /** Represents a GeneratedCodeInfo. */ + class GeneratedCodeInfo implements IGeneratedCodeInfo { + + /** + * Constructs a new GeneratedCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IGeneratedCodeInfo); + + /** GeneratedCodeInfo annotation. */ + public annotation: google.protobuf.GeneratedCodeInfo.IAnnotation[]; + + /** + * Creates a new GeneratedCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns GeneratedCodeInfo instance + */ + public static create(properties?: google.protobuf.IGeneratedCodeInfo): google.protobuf.GeneratedCodeInfo; + + /** + * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. 
+ * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo; + + /** + * Verifies a GeneratedCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GeneratedCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo; + + /** + * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. + * @param message GeneratedCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GeneratedCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace GeneratedCodeInfo { + + /** Properties of an Annotation. */ + interface IAnnotation { + + /** Annotation path */ + path?: (number[]|null); + + /** Annotation sourceFile */ + sourceFile?: (string|null); + + /** Annotation begin */ + begin?: (number|null); + + /** Annotation end */ + end?: (number|null); + } + + /** Represents an Annotation. */ + class Annotation implements IAnnotation { + + /** + * Constructs a new Annotation. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation); + + /** Annotation path. */ + public path: number[]; + + /** Annotation sourceFile. */ + public sourceFile: string; + + /** Annotation begin. */ + public begin: number; + + /** Annotation end. */ + public end: number; + + /** + * Creates a new Annotation instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns Annotation instance + */ + public static create(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Annotation message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Decodes an Annotation message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Verifies an Annotation message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Annotation message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Annotation + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Creates a plain object from an Annotation message. Also converts values to other types if specified. + * @param message Annotation + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo.Annotation, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Annotation to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an Any. */ + interface IAny { + + /** Any type_url */ + type_url?: (string|null); + + /** Any value */ + value?: (Uint8Array|null); + } + + /** Represents an Any. */ + class Any implements IAny { + + /** + * Constructs a new Any. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IAny); + + /** Any type_url. */ + public type_url: string; + + /** Any value. */ + public value: Uint8Array; + + /** + * Creates a new Any instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns Any instance + */ + public static create(properties?: google.protobuf.IAny): google.protobuf.Any; + + /** + * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @param message Any message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @param message Any message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Any message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Any; + + /** + * Decodes an Any message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Any; + + /** + * Verifies an Any message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Any message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Any + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Any; + + /** + * Creates a plain object from an Any message. Also converts values to other types if specified. + * @param message Any + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Any, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Any to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a Duration. */ + interface IDuration { + + /** Duration seconds */ + seconds?: (number|Long|null); + + /** Duration nanos */ + nanos?: (number|null); + } + + /** Represents a Duration. */ + class Duration implements IDuration { + + /** + * Constructs a new Duration. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IDuration); + + /** Duration seconds. */ + public seconds: (number|Long); + + /** Duration nanos. */ + public nanos: number; + + /** + * Creates a new Duration instance using the specified properties. + * @param [properties] Properties to set + * @returns Duration instance + */ + public static create(properties?: google.protobuf.IDuration): google.protobuf.Duration; + + /** + * Encodes the specified Duration message. Does not implicitly {@link google.protobuf.Duration.verify|verify} messages. 
+ * @param message Duration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IDuration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Duration message, length delimited. Does not implicitly {@link google.protobuf.Duration.verify|verify} messages. + * @param message Duration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IDuration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Duration message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Duration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Duration; + + /** + * Decodes a Duration message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Duration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Duration; + + /** + * Verifies a Duration message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Duration message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Duration + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Duration; + + /** + * Creates a plain object from a Duration message. Also converts values to other types if specified. + * @param message Duration + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Duration, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Duration to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an Empty. */ + interface IEmpty { + } + + /** Represents an Empty. */ + class Empty implements IEmpty { + + /** + * Constructs a new Empty. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEmpty); + + /** + * Creates a new Empty instance using the specified properties. + * @param [properties] Properties to set + * @returns Empty instance + */ + public static create(properties?: google.protobuf.IEmpty): google.protobuf.Empty; + + /** + * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * @param message Empty message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Empty message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. 
+ * @param message Empty message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Empty message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Empty; + + /** + * Decodes an Empty message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Empty; + + /** + * Verifies an Empty message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Empty message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Empty + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Empty; + + /** + * Creates a plain object from an Empty message. Also converts values to other types if specified. + * @param message Empty + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Empty, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Empty to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Namespace rpc. */ + namespace rpc { + + /** Properties of a Status. */ + interface IStatus { + + /** Status code */ + code?: (number|null); + + /** Status message */ + message?: (string|null); + + /** Status details */ + details?: (google.protobuf.IAny[]|null); + } + + /** Represents a Status. */ + class Status implements IStatus { + + /** + * Constructs a new Status. + * @param [properties] Properties to set + */ + constructor(properties?: google.rpc.IStatus); + + /** Status code. */ + public code: number; + + /** Status message. */ + public message: string; + + /** Status details. */ + public details: google.protobuf.IAny[]; + + /** + * Creates a new Status instance using the specified properties. + * @param [properties] Properties to set + * @returns Status instance + */ + public static create(properties?: google.rpc.IStatus): google.rpc.Status; + + /** + * Encodes the specified Status message. Does not implicitly {@link google.rpc.Status.verify|verify} messages. + * @param message Status message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.rpc.IStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Status message, length delimited. Does not implicitly {@link google.rpc.Status.verify|verify} messages. 
+ * @param message Status message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.rpc.IStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Status message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Status + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.rpc.Status; + + /** + * Decodes a Status message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Status + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.rpc.Status; + + /** + * Verifies a Status message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Status message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Status + */ + public static fromObject(object: { [k: string]: any }): google.rpc.Status; + + /** + * Creates a plain object from a Status message. Also converts values to other types if specified. + * @param message Status + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.rpc.Status, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Status to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } +} diff --git a/dist/protos/operations.js b/dist/protos/operations.js new file mode 100644 index 0000000..808ce91 --- /dev/null +++ b/dist/protos/operations.js @@ -0,0 +1 @@ +(e=>{"function"==typeof define&&define.amd?define(["protobufjs/minimal"],e):"function"==typeof require&&"object"==typeof module&&module&&module.exports&&(module.exports=e(require("protobufjs/minimal")))})(function(o){var e,t,n,F,a=o.Reader,r=o.Writer,i=o.util,p=o.roots.operations_protos||(o.roots.operations_protos={});function G(e,t,n){o.rpc.Service.call(this,e,t,n)}function l(e){if(e)for(var t=Object.keys(e),n=0;n>>3){case 1:o.name=e.string();break;case 2:o.metadata=p.google.protobuf.Any.decode(e,e.uint32());break;case 3:o.done=e.bool();break;case 4:o.error=p.google.rpc.Status.decode(e,e.uint32());break;case 5:o.response=p.google.protobuf.Any.decode(e,e.uint32());break;default:e.skipType(7&r)}}return o},l.decodeDelimited=function(e){return e instanceof a||(e=new a(e)),this.decode(e,e.uint32())},l.verify=function(e){if("object"!=typeof e||null===e)return"object expected";var t,n={};if(null!=e.name&&e.hasOwnProperty("name")&&!i.isString(e.name))return"name: string expected";if(null!=e.metadata&&e.hasOwnProperty("metadata")&&(t=p.google.protobuf.Any.verify(e.metadata)))return"metadata."+t;if(null!=e.done&&e.hasOwnProperty("done")&&"boolean"!=typeof e.done)return"done: boolean expected";if(null!=e.error&&e.hasOwnProperty("error")&&(n.result=1,t=p.google.rpc.Status.verify(e.error)))return"error."+t;if(null!=e.response&&e.hasOwnProperty("response")){if(1===n.result)return"result: multiple values";if(n.result=1,t=p.google.protobuf.Any.verify(e.response))return"response."+t}return null},l.fromObject=function(e){if(e instanceof p.google.longrunning.Operation)return e;var t=new p.google.longrunning.Operation;if(null!=e.name&&(t.name=String(e.name)),null!=e.metadata){if("object"!=typeof e.metadata)throw TypeError(".google.longrunning.Operation.metadata: object expected");t.metadata=p.google.protobuf.Any.fromObject(e.metadata)}if(null!=e.done&&(t.done=Boolean(e.done)),null!=e.error){if("object"!=typeof e.error)throw TypeError(".google.longrunning.Operation.error: object expected");t.error=p.google.rpc.Status.fromObject(e.error)}if(null!=e.response){if("object"!=typeof e.response)throw TypeError(".google.longrunning.Operation.response: object expected");t.response=p.google.protobuf.Any.fromObject(e.response)}return t},l.toObject=function(e,t){var n={};return(t=t||{}).defaults&&(n.name="",n.metadata=null,n.done=!1),null!=e.name&&e.hasOwnProperty("name")&&(n.name=e.name),null!=e.metadata&&e.hasOwnProperty("metadata")&&(n.metadata=p.google.protobuf.Any.toObject(e.metadata,t)),null!=e.done&&e.hasOwnProperty("done")&&(n.done=e.done),null!=e.error&&e.hasOwnProperty("error")&&(n.error=p.google.rpc.Status.toObject(e.error,t),t.oneofs)&&(n.result="error"),null!=e.response&&e.hasOwnProperty("response")&&(n.response=p.google.protobuf.Any.toObject(e.response,t),t.oneofs)&&(n.result="response"),n},l.prototype.toJSON=function(){return this.constructor.toObject(this,o.util.toJSONOptions)},l),t.GetOperationRequest=(B.prototype.name="",B.create=function(e){return new B(e)},B.encode=function(e,t){return t=t||r.create(),null!=e.name&&Object.hasOwnProperty.call(e,"name")&&t.uint32(10).string(e.name),t},B.encodeDelimited=function(e,t){return this.encode(e,t).ldelim()},B.decode=function(e,t){e instanceof a||(e=a.create(e));for(var n=void 0===t?e.len:e.pos+t,o=new 
[Remaining hunks: the action's bundled, minified JavaScript output (protobufjs-generated google.longrunning, google.api, and google.protobuf descriptor definitions) and lockfile additions for @actions/cache, @actions/core, @actions/exec, @actions/github, @actions/glob, @actions/http-client, @actions/io, @actions/tool-cache, @ampproject/remapping, @aws-crypto/*, @aws-sdk/client-dynamodb, @aws-sdk/client-s3, and @smithy/* packages.]
"@smithy/middleware-content-length": "^2.0.17", + "@smithy/middleware-endpoint": "^2.2.3", + "@smithy/middleware-retry": "^2.0.24", + "@smithy/middleware-serde": "^2.0.15", + "@smithy/middleware-stack": "^2.0.9", + "@smithy/node-config-provider": "^2.1.8", + "@smithy/node-http-handler": "^2.2.1", + "@smithy/protocol-http": "^3.0.11", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", + "@smithy/url-parser": "^2.0.15", + "@smithy/util-base64": "^2.0.1", + "@smithy/util-body-length-browser": "^2.0.1", + "@smithy/util-body-length-node": "^2.1.0", + "@smithy/util-defaults-mode-browser": "^2.0.22", + "@smithy/util-defaults-mode-node": "^2.0.29", + "@smithy/util-endpoints": "^1.0.7", + "@smithy/util-retry": "^2.0.8", + "@smithy/util-stream": "^2.0.23", + "@smithy/util-utf8": "^2.0.2", + "@smithy/util-waiter": "^2.0.15", + "fast-xml-parser": "4.2.5", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-s3/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.474.0.tgz", + "integrity": "sha512-6toUmQUIHkDM/P2/nyLEO/mcWOIPByTlegqX9VCHhYh9Fs5MDT2nit7I6fZzBjZjB5oVTwKjbzgxae9cE3bhqw==", + "dependencies": { + "@aws-crypto/sha256-browser": "3.0.0", + "@aws-crypto/sha256-js": "3.0.0", + "@aws-sdk/core": "3.474.0", + "@aws-sdk/middleware-host-header": "3.468.0", + "@aws-sdk/middleware-logger": "3.468.0", + "@aws-sdk/middleware-recursion-detection": "3.468.0", + "@aws-sdk/middleware-user-agent": "3.470.0", + "@aws-sdk/region-config-resolver": "3.470.0", + "@aws-sdk/types": "3.468.0", + "@aws-sdk/util-endpoints": "3.470.0", + "@aws-sdk/util-user-agent-browser": "3.468.0", + "@aws-sdk/util-user-agent-node": "3.470.0", + "@smithy/config-resolver": "^2.0.21", + "@smithy/fetch-http-handler": "^2.3.1", + "@smithy/hash-node": "^2.0.17", + "@smithy/invalid-dependency": "^2.0.15", + "@smithy/middleware-content-length": "^2.0.17", + "@smithy/middleware-endpoint": "^2.2.3", + "@smithy/middleware-retry": "^2.0.24", + "@smithy/middleware-serde": "^2.0.15", + "@smithy/middleware-stack": "^2.0.9", + "@smithy/node-config-provider": "^2.1.8", + "@smithy/node-http-handler": "^2.2.1", + "@smithy/protocol-http": "^3.0.11", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", + "@smithy/url-parser": "^2.0.15", + "@smithy/util-base64": "^2.0.1", + "@smithy/util-body-length-browser": "^2.0.1", + "@smithy/util-body-length-node": "^2.1.0", + "@smithy/util-defaults-mode-browser": "^2.0.22", + "@smithy/util-defaults-mode-node": "^2.0.29", + "@smithy/util-endpoints": "^1.0.7", + "@smithy/util-retry": "^2.0.8", + "@smithy/util-utf8": "^2.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-sso/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + 
"node_modules/@aws-sdk/client-sts": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.474.0.tgz", + "integrity": "sha512-qPPMbrDVAUJgYiFWVewFG7dg0VyMfuGNNK4IC1nZr0eXejUTbdm8cio6IZ8OkWtK+A+L+wx1vX5686WYVgQ0dQ==", + "dependencies": { + "@aws-crypto/sha256-browser": "3.0.0", + "@aws-crypto/sha256-js": "3.0.0", + "@aws-sdk/core": "3.474.0", + "@aws-sdk/credential-provider-node": "3.474.0", + "@aws-sdk/middleware-host-header": "3.468.0", + "@aws-sdk/middleware-logger": "3.468.0", + "@aws-sdk/middleware-recursion-detection": "3.468.0", + "@aws-sdk/middleware-user-agent": "3.470.0", + "@aws-sdk/region-config-resolver": "3.470.0", + "@aws-sdk/types": "3.468.0", + "@aws-sdk/util-endpoints": "3.470.0", + "@aws-sdk/util-user-agent-browser": "3.468.0", + "@aws-sdk/util-user-agent-node": "3.470.0", + "@smithy/config-resolver": "^2.0.21", + "@smithy/core": "^1.1.0", + "@smithy/fetch-http-handler": "^2.3.1", + "@smithy/hash-node": "^2.0.17", + "@smithy/invalid-dependency": "^2.0.15", + "@smithy/middleware-content-length": "^2.0.17", + "@smithy/middleware-endpoint": "^2.2.3", + "@smithy/middleware-retry": "^2.0.24", + "@smithy/middleware-serde": "^2.0.15", + "@smithy/middleware-stack": "^2.0.9", + "@smithy/node-config-provider": "^2.1.8", + "@smithy/node-http-handler": "^2.2.1", + "@smithy/protocol-http": "^3.0.11", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", + "@smithy/url-parser": "^2.0.15", + "@smithy/util-base64": "^2.0.1", + "@smithy/util-body-length-browser": "^2.0.1", + "@smithy/util-body-length-node": "^2.1.0", + "@smithy/util-defaults-mode-browser": "^2.0.22", + "@smithy/util-defaults-mode-node": "^2.0.29", + "@smithy/util-endpoints": "^1.0.7", + "@smithy/util-middleware": "^2.0.8", + "@smithy/util-retry": "^2.0.8", + "@smithy/util-utf8": "^2.0.2", + "fast-xml-parser": "4.2.5", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-sts/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.474.0.tgz", + "integrity": "sha512-eVRdeB+AoTNSzfc4viHfr0jfkHujSlf4ToExJtTuxS1wlgmIyyxRNrVKxbf0K78YK/TXRsRlJPoS5QCD5h1S2w==", + "dependencies": { + "@smithy/core": "^1.1.0", + "@smithy/protocol-http": "^3.0.11", + "@smithy/signature-v4": "^2.0.0", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.468.0.tgz", + "integrity": "sha512-k/1WHd3KZn0EQYjadooj53FC0z24/e4dUZhbSKTULgmxyO62pwh9v3Brvw4WRa/8o2wTffU/jo54tf4vGuP/ZA==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/property-provider": "^2.0.0", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + 
"integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.474.0.tgz", + "integrity": "sha512-3Y2fHI4ZCNjdOO47Vh/xBgLXOrKm3KwBkYkBKKT2g02FUGNT8NLjJg8WBo3D4RQX2h34qx4mtW5nTY6YcGP80Q==", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.468.0", + "@aws-sdk/credential-provider-process": "3.468.0", + "@aws-sdk/credential-provider-sso": "3.474.0", + "@aws-sdk/credential-provider-web-identity": "3.468.0", + "@aws-sdk/types": "3.468.0", + "@smithy/credential-provider-imds": "^2.0.0", + "@smithy/property-provider": "^2.0.0", + "@smithy/shared-ini-file-loader": "^2.0.6", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.474.0.tgz", + "integrity": "sha512-3OVVVGnb8Ru5hWeeHkg76YZT5mrufweIiWr6ge5zn7FYxc7WkyqIJ0XehqUqG5VQfaYhqh7uq/zmk8OE2B04lQ==", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.468.0", + "@aws-sdk/credential-provider-ini": "3.474.0", + "@aws-sdk/credential-provider-process": "3.468.0", + "@aws-sdk/credential-provider-sso": "3.474.0", + "@aws-sdk/credential-provider-web-identity": "3.468.0", + "@aws-sdk/types": "3.468.0", + "@smithy/credential-provider-imds": "^2.0.0", + "@smithy/property-provider": "^2.0.0", + "@smithy/shared-ini-file-loader": "^2.0.6", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.468.0.tgz", + "integrity": "sha512-OYSn1A/UsyPJ7Z8Q2cNhTf55O36shPmSsvOfND04nSfu1nPaR+VUvvsP7v+brhGpwC/GAKTIdGAo4blH31BS6A==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/property-provider": "^2.0.0", + "@smithy/shared-ini-file-loader": "^2.0.6", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": 
"sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.474.0.tgz", + "integrity": "sha512-ik4rzhQtcRLSHB/MLQfi/dSpILxPd3zITb79DIEnqT3gpZRNjoARkZ3Hi68pujkU2530NYf8NcFwLCWoV1hS7Q==", + "dependencies": { + "@aws-sdk/client-sso": "3.474.0", + "@aws-sdk/token-providers": "3.470.0", + "@aws-sdk/types": "3.468.0", + "@smithy/property-provider": "^2.0.0", + "@smithy/shared-ini-file-loader": "^2.0.6", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.468.0.tgz", + "integrity": "sha512-rexymPmXjtkwCPfhnUq3EjO1rSkf39R4Jz9CqiM7OsqK2qlT5Y/V3gnMKn0ZMXsYaQOMfM3cT5xly5R+OKDHlw==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/property-provider": "^2.0.0", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/endpoint-cache": { + "version": "3.465.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/endpoint-cache/-/endpoint-cache-3.465.0.tgz", + "integrity": "sha512-0cuotk23hVSrqxHkJ3TTWC9MVMRgwlUvCatyegJEauJnk8kpLSGXE5KVdExlUBwShGNlj7ac29okZ9m17iTi5Q==", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.474.0.tgz", + "integrity": "sha512-uZOuqdue5b85NF5XQGR3vRlKBzUMfSab4YCHovo5E06UYwstS5KGDvjV+29uoK43QEcaGtXA9VTWJugIC6cgyA==", + "dependencies": { + "@aws-sdk/util-dynamodb": "3.474.0", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.0.0" + } + }, + "node_modules/@aws-sdk/middleware-bucket-endpoint": { + "version": "3.470.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.470.0.tgz", + "integrity": "sha512-vLXXNWtsRmEIwzJ9HUQfIuTNAsEzvCv0Icsnkvt2BiBZXnmHdp2vIC3e3+kfy1D7dVQloXqMmnfcLu/BUMu2Jw==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@aws-sdk/util-arn-parser": "3.465.0", + 
"@smithy/node-config-provider": "^2.1.8", + "@smithy/protocol-http": "^3.0.11", + "@smithy/types": "^2.7.0", + "@smithy/util-config-provider": "^2.0.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-bucket-endpoint/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-endpoint-discovery": { + "version": "3.470.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.470.0.tgz", + "integrity": "sha512-pN+3Y7W3Xvs6pE2RlkXmO7ugOGLXsGR3zJI/fiGOLoCOGESuM3fq3CXdasOl76wch0L9iB1lPmoHMabkxKugGQ==", + "dependencies": { + "@aws-sdk/endpoint-cache": "3.465.0", + "@aws-sdk/types": "3.468.0", + "@smithy/node-config-provider": "^2.1.8", + "@smithy/protocol-http": "^3.0.11", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-endpoint-discovery/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-expect-continue": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.468.0.tgz", + "integrity": "sha512-/wmLjmfgeulxhhmnxX3X3N933TvGsYckVIFjAtDSpLjqkbwzEcNiLq7AdmNJ4BfxG0MCMgcht561DCCD19x8Bg==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/protocol-http": "^3.0.11", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-expect-continue/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-flexible-checksums": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.468.0.tgz", + "integrity": "sha512-LQwL/N5MCj3Y5keLLewHTqeAXUIMsHFZyxDXRm/uxrOon9ufLKDvGvzAmfwn1/CuSUo66ZfT8VPSA4BsC90RtA==", + "dependencies": { + "@aws-crypto/crc32": "3.0.0", + "@aws-crypto/crc32c": "3.0.0", + "@aws-sdk/types": "3.468.0", + "@smithy/is-array-buffer": "^2.0.0", + "@smithy/protocol-http": "^3.0.11", + "@smithy/types": "^2.7.0", + "@smithy/util-utf8": "^2.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + 
"dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.468.0.tgz", + "integrity": "sha512-gwQ+/QhX+lhof304r6zbZ/V5l5cjhGRxLL3CjH1uJPMcOAbw9wUlMdl+ibr8UwBZ5elfKFGiB1cdW/0uMchw0w==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/protocol-http": "^3.0.11", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-location-constraint": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.468.0.tgz", + "integrity": "sha512-0gBX/lDynQr4YIhM9h1dVnkVWqrg+34iOCVIUq8jHxzUzgZWglGkG9lHGGg0r1xkLTmegeoo1OKH8wrQ6n33Cg==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-location-constraint/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.468.0.tgz", + "integrity": "sha512-X5XHKV7DHRXI3f29SAhJPe/OxWRFgDWDMMCALfzhmJfCi6Jfh0M14cJKoC+nl+dk9lB+36+jKjhjETZaL2bPlA==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.468.0.tgz", + "integrity": "sha512-vch9IQib2Ng9ucSyRW2eKNQXHUPb5jUPCLA5otTW/8nGjcOU37LxQG4WrxO7uaJ9Oe8hjHO+hViE3P0KISUhtA==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/protocol-http": "^3.0.11", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": 
"sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-sdk-s3": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.474.0.tgz", + "integrity": "sha512-62aAo/8u5daIabeJ+gseYeHeShe9eYH6mH+kfWmLsHybXCCv1EaD/ZkdXWNhL0HZ3bUI1z1SF1p8jjTAWALnwA==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@aws-sdk/util-arn-parser": "3.465.0", + "@smithy/node-config-provider": "^2.1.8", + "@smithy/protocol-http": "^3.0.11", + "@smithy/signature-v4": "^2.0.0", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", + "@smithy/util-config-provider": "^2.0.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-signing": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.468.0.tgz", + "integrity": "sha512-s+7fSB1gdnnTj5O0aCCarX3z5Vppop8kazbNSZADdkfHIDWCN80IH4ZNjY3OWqaAz0HmR4LNNrovdR304ojb4Q==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/property-provider": "^2.0.0", + "@smithy/protocol-http": "^3.0.11", + "@smithy/signature-v4": "^2.0.0", + "@smithy/types": "^2.7.0", + "@smithy/util-middleware": "^2.0.8", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-signing/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-ssec": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.468.0.tgz", + "integrity": "sha512-y1qLW24wRkOGBTK5d6eJXf6d8HYo4rzT4a1mNDN1rd18NSffwQ6Yke5qeUiIaxa0y/l+FvvNYErbhYtij2rJoQ==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-ssec/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.470.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.470.0.tgz", + "integrity": "sha512-s0YRGgf4fT5KwwTefpoNUQfB5JghzXyvmPfY1QuFEMeVQNxv0OPuydzo3rY2oXPkZjkulKDtpm5jzIHwut75hA==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@aws-sdk/util-endpoints": "3.470.0", + 
"@smithy/protocol-http": "^3.0.11", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.470.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.470.0.tgz", + "integrity": "sha512-C1o1J06iIw8cyAAOvHqT4Bbqf+PgQ/RDlSyjt2gFfP2OovDpc2o2S90dE8f8iZdSGpg70N5MikT1DBhW9NbhtQ==", + "dependencies": { + "@smithy/node-config-provider": "^2.1.8", + "@smithy/types": "^2.7.0", + "@smithy/util-config-provider": "^2.0.0", + "@smithy/util-middleware": "^2.0.8", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/signature-v4-multi-region": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.474.0.tgz", + "integrity": "sha512-93OWRQgTJZASXLrlUNX7mmXknNkYxFYldRLARmYQccONmnIqgYQW0lQj8BFwqkHJTzSMik3/UsU0SHKwZ9ynYA==", + "dependencies": { + "@aws-sdk/middleware-sdk-s3": "3.474.0", + "@aws-sdk/types": "3.468.0", + "@smithy/protocol-http": "^3.0.11", + "@smithy/signature-v4": "^2.0.0", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/signature-v4-multi-region/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client": { + "version": "3.374.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/smithy-client/-/smithy-client-3.374.0.tgz", + "integrity": "sha512-YQBdO/Nv5EXBg/qfMF4GgYYLNN3Y/06MyuVBYILC1TKAnMoLy2FV0VOYyediagepAcWPdJqyUq4MCNNBy0CPRg==", + "deprecated": "This package has moved to @smithy/smithy-client", + "dependencies": { + "@smithy/smithy-client": "^1.0.3", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-5imgGUlZL4dW4YWdMYAKLmal9ny/tlenM81QZY7xYyb76z9Z/QOg7oM5Ak9HQl8QfFTlGVWwcMXl+54jroRgEQ==", + "dependencies": { + "@smithy/types": "^1.2.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/fetch-http-handler": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-1.1.0.tgz", + "integrity": "sha512-N22C9R44u5WGlcY+Wuv8EXmCAq62wWwriRAuoczMEwAIjPbvHSthyPSLqI4S7kAST1j6niWg8kwpeJ3ReAv3xg==", + "dependencies": { + "@smithy/protocol-http": "^1.2.0", + "@smithy/querystring-builder": "^1.1.0", + "@smithy/types": "^1.2.0", + "@smithy/util-base64": "^1.1.0", + "tslib": "^2.5.0" + } + }, + 
"node_modules/@aws-sdk/smithy-client/node_modules/@smithy/is-array-buffer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-1.1.0.tgz", + "integrity": "sha512-twpQ/n+3OWZJ7Z+xu43MJErmhB/WO/mMTnqR6PwWQShvSJ/emx5d1N59LQZk6ZpTAeuRWrc+eHhkzTp9NFjNRQ==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/middleware-stack": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-1.1.0.tgz", + "integrity": "sha512-XynYiIvXNea2BbLcppvpNK0zu8o2woJqgnmxqYTn4FWagH/Hr2QIk8LOsUz7BIJ4tooFhmx8urHKCdlPbbPDCA==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/node-http-handler": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-1.1.0.tgz", + "integrity": "sha512-d3kRriEgaIiGXLziAM8bjnaLn1fthCJeTLZIwEIpzQqe6yPX0a+yQoLCTyjb2fvdLwkMoG4p7THIIB5cj5lkbg==", + "dependencies": { + "@smithy/abort-controller": "^1.1.0", + "@smithy/protocol-http": "^1.2.0", + "@smithy/querystring-builder": "^1.1.0", + "@smithy/types": "^1.2.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/protocol-http": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-1.2.0.tgz", + "integrity": "sha512-GfGfruksi3nXdFok5RhgtOnWe5f6BndzYfmEXISD+5gAGdayFGpjWu5pIqIweTudMtse20bGbc+7MFZXT1Tb8Q==", + "dependencies": { + "@smithy/types": "^1.2.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/querystring-builder": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-1.1.0.tgz", + "integrity": "sha512-gDEi4LxIGLbdfjrjiY45QNbuDmpkwh9DX4xzrR2AzjjXpxwGyfSpbJaYhXARw9p17VH0h9UewnNQXNwaQyYMDA==", + "dependencies": { + "@smithy/types": "^1.2.0", + "@smithy/util-uri-escape": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/smithy-client": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-1.1.0.tgz", + "integrity": "sha512-j32SGgVhv2G9nBTmel9u3OXux8KG20ssxuFakJrEeDug3kqbl1qrGzVLCe+Eib402UDtA0Sp1a4NZ2SEXDBxag==", + "dependencies": { + "@smithy/middleware-stack": "^1.1.0", + "@smithy/types": "^1.2.0", + "@smithy/util-stream": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/types": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-1.2.0.tgz", + "integrity": "sha512-z1r00TvBqF3dh4aHhya7nz1HhvCg4TRmw51fjMrh5do3h+ngSstt/yKlNbHeb9QxJmFbmN8KEVSWgb1bRvfEoA==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/util-base64": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-1.1.0.tgz", + "integrity": "sha512-FpYmDmVbOXAxqvoVCwqehUN0zXS+lN8V7VS9O7I8MKeVHdSTsZzlwiMEvGoyTNOXWn8luF4CTDYgNHnZViR30g==", + "dependencies": { + "@smithy/util-buffer-from": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": 
">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/util-buffer-from": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-1.1.0.tgz", + "integrity": "sha512-9m6NXE0ww+ra5HKHCHig20T+FAwxBAm7DIdwc/767uGWbRcY720ybgPacQNB96JMOI7xVr/CDa3oMzKmW4a+kw==", + "dependencies": { + "@smithy/is-array-buffer": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/util-hex-encoding": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-1.1.0.tgz", + "integrity": "sha512-7UtIE9eH0u41zpB60Jzr0oNCQ3hMJUabMcKRUVjmyHTXiWDE4vjSqN6qlih7rCNeKGbioS7f/y2Jgym4QZcKFg==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/util-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-1.1.0.tgz", + "integrity": "sha512-w3lsdGsntaLQIrwDWJkIFKrFscgZXwU/oxsse09aSTNv5TckPhDeYea3LhsDrU5MGAG3vprhVZAKr33S45coVA==", + "dependencies": { + "@smithy/fetch-http-handler": "^1.1.0", + "@smithy/node-http-handler": "^1.1.0", + "@smithy/types": "^1.2.0", + "@smithy/util-base64": "^1.1.0", + "@smithy/util-buffer-from": "^1.1.0", + "@smithy/util-hex-encoding": "^1.1.0", + "@smithy/util-utf8": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/util-uri-escape": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-1.1.0.tgz", + "integrity": "sha512-/jL/V1xdVRt5XppwiaEU8Etp5WHZj609n0xMTuehmCqdoOFbId1M+aEeDWZsQ+8JbEB/BJ6ynY2SlYmOaKtt8w==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/smithy-client/node_modules/@smithy/util-utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-1.1.0.tgz", + "integrity": "sha512-p/MYV+JmqmPyjdgyN2UxAeYDj9cBqCjp0C/NsTWnnjoZUVqoeZ6IrW915L9CAKWVECgv9lVQGc4u/yz26/bI1A==", + "dependencies": { + "@smithy/util-buffer-from": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.470.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.470.0.tgz", + "integrity": "sha512-rzxnJxEUJiV69Cxsf0AHXTqJqTACITwcSH/PL4lWP4uvtzdrzSi3KA3u2aWHWpOcdE6+JFvdICscsbBSo3/TOg==", + "dependencies": { + "@aws-crypto/sha256-browser": "3.0.0", + "@aws-crypto/sha256-js": "3.0.0", + "@aws-sdk/middleware-host-header": "3.468.0", + "@aws-sdk/middleware-logger": "3.468.0", + "@aws-sdk/middleware-recursion-detection": "3.468.0", + "@aws-sdk/middleware-user-agent": "3.470.0", + "@aws-sdk/region-config-resolver": "3.470.0", + "@aws-sdk/types": "3.468.0", + "@aws-sdk/util-endpoints": "3.470.0", + "@aws-sdk/util-user-agent-browser": "3.468.0", + "@aws-sdk/util-user-agent-node": "3.470.0", + "@smithy/config-resolver": "^2.0.21", + "@smithy/fetch-http-handler": "^2.3.1", + "@smithy/hash-node": "^2.0.17", + "@smithy/invalid-dependency": "^2.0.15", + "@smithy/middleware-content-length": "^2.0.17", + "@smithy/middleware-endpoint": "^2.2.3", + "@smithy/middleware-retry": "^2.0.24", + "@smithy/middleware-serde": "^2.0.15", + "@smithy/middleware-stack": "^2.0.9", + "@smithy/node-config-provider": "^2.1.8", 
+ "@smithy/node-http-handler": "^2.2.1", + "@smithy/property-provider": "^2.0.0", + "@smithy/protocol-http": "^3.0.11", + "@smithy/shared-ini-file-loader": "^2.0.6", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", + "@smithy/url-parser": "^2.0.15", + "@smithy/util-base64": "^2.0.1", + "@smithy/util-body-length-browser": "^2.0.1", + "@smithy/util-body-length-node": "^2.1.0", + "@smithy/util-defaults-mode-browser": "^2.0.22", + "@smithy/util-defaults-mode-node": "^2.0.29", + "@smithy/util-endpoints": "^1.0.7", + "@smithy/util-retry": "^2.0.8", + "@smithy/util-utf8": "^2.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/token-providers/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.692.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.692.0.tgz", + "integrity": "sha512-RpNvzD7zMEhiKgmlxGzyXaEcg2khvM7wd5sSHVapOcrde1awQSOMGI4zKBQ+wy5TnDfrm170ROz/ERLYtrjPZA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/types/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/util-arn-parser": { + "version": "3.465.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.465.0.tgz", + "integrity": "sha512-zOJ82vzDJFqBX9yZBlNeHHrul/kpx/DCoxzW5UBbZeb26kfV53QhMSoEmY8/lEbBqlqargJ/sgRC845GFhHNQw==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/util-dynamodb": { + "version": "3.474.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-dynamodb/-/util-dynamodb-3.474.0.tgz", + "integrity": "sha512-I4wZTpmd8UJUV6siJ4pB2dbv/RzlC8bRAqOj0m/w0ZoDGt9UpVWfC7b+s7jaGSsD8I1vuuQ/CLw58RgESX9anQ==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.0.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.470.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.470.0.tgz", + "integrity": "sha512-6N6VvPCmu+89p5Ez/+gLf+X620iQ9JpIs8p8ECZiCodirzFOe8NC1O2S7eov7YiG9IHSuodqn/0qNq+v+oLe0A==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/util-endpoints": "^1.0.7", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/util-endpoints/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + 
"node_modules/@aws-sdk/util-locate-window": { + "version": "3.310.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.310.0.tgz", + "integrity": "sha512-qo2t/vBTnoXpjKxlsC2e1gBrRm80M3bId27r0BRB2VniSSe7bL1mmzM+/HFtujm0iAxtPM+aLEflLJlJeDPg0w==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.468.0.tgz", + "integrity": "sha512-OJyhWWsDEizR3L+dCgMXSUmaCywkiZ7HSbnQytbeKGwokIhD69HTiJcibF/sgcM5gk4k3Mq3puUhGnEZ46GIig==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/types": "^2.7.0", + "bowser": "^2.11.0", + "tslib": "^2.5.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.470.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.470.0.tgz", + "integrity": "sha512-QxsZ9iVHcBB/XRdYvwfM5AMvNp58HfqkIrH88mY0cmxuvtlIGDfWjczdDrZMJk9y0vIq+cuoCHsGXHu7PyiEAQ==", + "dependencies": { + "@aws-sdk/types": "3.468.0", + "@smithy/node-config-provider": "^2.1.8", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@aws-sdk/util-user-agent-node/node_modules/@aws-sdk/types": { + "version": "3.468.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz", + "integrity": "sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/util-utf8-browser": { + "version": "3.259.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz", + "integrity": "sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==", + "dependencies": { + "tslib": "^2.3.1" + } + }, + "node_modules/@aws-sdk/xml-builder": { + "version": "3.472.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.472.0.tgz", + "integrity": "sha512-PwjVxz1hr9up8QkddabuScPZ/d5aDHgvHYgK4acHYzltXL4wngfvimi5ZqXTzVWF2QANxHmWnHUr45QJX71oJQ==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-auth": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.5.0.tgz", + "integrity": 
"sha512-udzoBuYG1VBoHVohDTrvKjyzel34zt77Bhp7dQntVGGD0ehVq48owENbBG8fIgkHRNUBQH5k1r0hpoMu5L8+kw==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-util": "^1.1.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@azure/core-client": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/@azure/core-client/-/core-client-1.7.3.tgz", + "integrity": "sha512-kleJ1iUTxcO32Y06dH9Pfi9K4U+Tlb111WXEnbt7R/ne+NLRwppZiTGJuTD5VVoxTMK5NTbEtm5t2vcdNCFe2g==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-rest-pipeline": "^1.9.1", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.0.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@azure/core-http-compat": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/core-http-compat/-/core-http-compat-2.1.2.tgz", + "integrity": "sha512-5MnV1yqzZwgNLLjlizsU3QqOeQChkIXw781Fwh1xdAqJR5AA32IUaq6xv1BICJvfbHoa+JYcaij2HFkhLbNTJQ==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-client": "^1.3.0", + "@azure/core-rest-pipeline": "^1.3.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-http-compat/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-lro": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz", + "integrity": "sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.2.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-paging": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz", + "integrity": "sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.13.0.tgz", + "integrity": "sha512-a62aP/wppgmnfIkJLfcB4ssPBcH94WzrzPVJ3tlJt050zX4lfmtnvy95D3igDo3f31StO+9BgPrzvkj4aOxnoA==", + "dependencies": { + "@azure/abort-controller": "^1.1.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-tracing": "^1.0.1", + "@azure/core-util": "^1.3.0", + "@azure/logger": "^1.0.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + 
"node_modules/@azure/core-tracing": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.2.0.tgz", + "integrity": "sha512-UKTiEJPkWcESPYJz3X5uKRYyOcJD+4nYph+KpfdPRnQJVrZfk0KJgdnaAWKfhsBBtAf/D58Az4AvCJEmWgIBAg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-util": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.6.1.tgz", + "integrity": "sha512-h5taHeySlsV9qxuK64KZxy4iln1BtMYlNt5jbuEFN3UFSAd1EwKg/Gjl5a6tZ/W8t6li3xPnutOx7zbDyXnPmQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@azure/core-xml": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/@azure/core-xml/-/core-xml-1.4.4.tgz", + "integrity": "sha512-J4FYAqakGXcbfeZjwjMzjNcpcH4E+JtEBv+xcV1yL0Ydn/6wbQfeFKTCHh9wttAi0lmajHw7yBbHPRG+YHckZQ==", + "license": "MIT", + "dependencies": { + "fast-xml-parser": "^4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-xml/node_modules/fast-xml-parser": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.0.tgz", + "integrity": "sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/@azure/cosmos": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@azure/cosmos/-/cosmos-4.0.0.tgz", + "integrity": "sha512-/Z27p1+FTkmjmm8jk90zi/HrczPHw2t8WecFnsnTe4xGocWl0Z4clP0YlLUTJPhRLWYa5upwD9rMvKJkS1f1kg==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-rest-pipeline": "^1.2.0", + "@azure/core-tracing": "^1.0.0", + "debug": "^4.1.1", + "fast-json-stable-stringify": "^2.1.0", + "jsbi": "^3.1.3", + "node-abort-controller": "^3.0.0", + "priorityqueuejs": "^1.0.0", + "semaphore": "^1.0.5", + "tslib": "^2.2.0", + "universal-user-agent": "^6.0.0", + "uuid": "^8.3.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@azure/cosmos/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@azure/identity": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@azure/identity/-/identity-4.0.0.tgz", + "integrity": "sha512-gtPYxIL0kI39Dw4t3HvlbfhOdXqKD2MqDgynlklF0j728j51dcKgRo6FLX0QzpBw/1gGfLxjMXqq3nKOSQ2lmA==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.5.0", + "@azure/core-client": "^1.4.0", + "@azure/core-rest-pipeline": "^1.1.0", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.0.0", + "@azure/logger": "^1.0.0", + "@azure/msal-browser": "^3.5.0", + "@azure/msal-node": "^2.5.1", + "events": "^3.0.0", + "jws": "^4.0.0", + "open": "^8.0.0", + "stoppable": "^1.1.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/logger": { + "version": "1.0.4", + 
"resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.0.4.tgz", + "integrity": "sha512-ustrPY8MryhloQj7OWGe+HrYx+aoiOxzbXTtgblbV3xwCqpzUK36phH3XNHQKj3EPonyFUuDTfR3qFhTEAuZEg==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@azure/ms-rest-js": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz", + "integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==", + "license": "MIT", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.5.0" + } + }, + "node_modules/@azure/ms-rest-js/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "license": "0BSD" + }, + "node_modules/@azure/ms-rest-js/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@azure/msal-browser": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-3.6.0.tgz", + "integrity": "sha512-FrFBJXRJMyWXjAjg4cUNZwEKktzfzD/YD9+S1kj2ors67hKoveam4aL0bZuCZU/jTiHTn0xDQGQh2ksCMXTXtA==", + "dependencies": { + "@azure/msal-common": "14.5.0" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-common": { + "version": "14.5.0", + "resolved": "https://registry.npmjs.org/@azure/msal-common/-/msal-common-14.5.0.tgz", + "integrity": "sha512-Gx5rZbiZV/HiZ2nEKfjfAF/qDdZ4/QWxMvMo2jhIFVz528dVKtaZyFAOtsX2Ak8+TQvRsGCaEfuwJFuXB6tu1A==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-node": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@azure/msal-node/-/msal-node-2.6.0.tgz", + "integrity": "sha512-RWAWCYYrSldIYC47oWtofIun41e6SB9TBYgGYsezq6ednagwo9ZRFyRsvl1NabmdTkdDDXRAABIdveeN2Gtd8w==", + "dependencies": { + "@azure/msal-common": "14.5.0", + "jsonwebtoken": "^9.0.0", + "uuid": "^8.3.0" + }, + "engines": { + "node": "16|| 18 || 20" + } + }, + "node_modules/@azure/msal-node/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@azure/storage-blob": { + "version": "12.25.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.25.0.tgz", + "integrity": "sha512-oodouhA3nCCIh843tMMbxty3WqfNT+Vgzj3Xo5jqR9UPnzq3d7mzLjlHAYz7lW+b4km3SIgz+NAgztvhm7Z6kQ==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.4.0", + "@azure/core-client": "^1.6.2", + "@azure/core-http-compat": "^2.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-rest-pipeline": "^1.10.1", + "@azure/core-tracing": "^1.1.2", + "@azure/core-util": "^1.6.1", + "@azure/core-xml": "^1.4.3", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": 
">=18.0.0" + } + }, + "node_modules/@azure/storage-blob/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.2.tgz", + "integrity": "sha512-Z0WgzSEa+aUcdiJuCIqgujCshpMWgUpgOxXotrYPSA53hA3qopNaqcJpyr0hVb1FeWdnqFA35/fUtXgBK8srQg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz", + "integrity": "sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.26.0", + "@babel/generator": "^7.26.0", + "@babel/helper-compilation-targets": "^7.25.9", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.0", + "@babel/parser": "^7.26.0", + "@babel/template": "^7.25.9", + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.26.0", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/generator": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.2.tgz", + "integrity": "sha512-zevQbhbau95nkoxSq3f/DC/SC+EEOUZd3DYqfSkMhY2/wfSeaHV1Ew4vk8e+x8lja31IbyuUa2uQ3JONqKbysw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.26.2", + "@babel/types": "^7.26.0", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.9.tgz", + "integrity": "sha512-j9Db8Suy6yV/VHa4qzrj9yZfZxhLWQdVnRlXxmKLYlhWUVB1sB2G5sxuWYXk/whHD9iW76PmNzxZ4UCnTQTVEQ==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.25.9", + "@babel/helper-validator-option": "^7.25.9", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "dev": true, + "dependencies": { + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", + "dev": true, + "dependencies": { + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.25.9.tgz", + "integrity": "sha512-kSMlyUVdWe25rEsRGviIgOWnoT/nfABVWlqt9N19/dIPWViAOW2s9wznP5tURbs/IDuNk4gPy3YdYRgH3uxhBw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", + "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.0.tgz", + "integrity": "sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw==", + "dev": true, + "dependencies": { + "@babel/template": "^7.25.9", + "@babel/types": "^7.26.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.2.tgz", + "integrity": "sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ==", + "dev": true, + "dependencies": { + "@babel/types": "^7.26.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + 
"node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz", + "integrity": "sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz", + "integrity": 
"sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "dependencies": { + 
"@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz", + "integrity": "sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz", + "integrity": "sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.25.9", + "@babel/parser": "^7.25.9", + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.9.tgz", + "integrity": "sha512-ZCuvfwOwlz/bawvAuvcj8rrithP2/N55Tzz342AkTvq4qaWbGfmCk/tKhNaV2cthijKrPAA8SRJV5WWe7IBMJw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.25.9", + "@babel/generator": "^7.25.9", + "@babel/parser": "^7.25.9", + "@babel/template": "^7.25.9", + "@babel/types": "^7.25.9", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/types": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.0.tgz", + "integrity": "sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@bundled-es-modules/cookie": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@bundled-es-modules/cookie/-/cookie-2.0.0.tgz", + "integrity": "sha512-Or6YHg/kamKHpxULAdSqhGqnWFneIXu1NKvvfBBzKGwpVsYuFIQ5aBPHDnnoR3ghW1nvSkALd+EF9iMtY7Vjxw==", + "dev": true, + "dependencies": { + "cookie": "^0.5.0" + } + }, + "node_modules/@bundled-es-modules/js-levenshtein": { + "version": "2.0.1", + 
"resolved": "https://registry.npmjs.org/@bundled-es-modules/js-levenshtein/-/js-levenshtein-2.0.1.tgz", + "integrity": "sha512-DERMS3yfbAljKsQc0U2wcqGKUWpdFjwqWuoMugEJlqBnKO180/n+4SR/J8MRDt1AN48X1ovgoD9KrdVXcaa3Rg==", + "dev": true, + "dependencies": { + "js-levenshtein": "^1.1.6" + } + }, + "node_modules/@bundled-es-modules/statuses": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@bundled-es-modules/statuses/-/statuses-1.0.1.tgz", + "integrity": "sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg==", + "dev": true, + "dependencies": { + "statuses": "^2.0.1" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", + "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/js": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz", + "integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": 
"sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/firestore": { + "version": "7.10.0", + "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.10.0.tgz", + "integrity": "sha512-VFNhdHvfnmqcHHs6YhmSNHHxQqaaD64GwiL0c+e1qz85S8SWZPC2XFRf8p9yHRTF40Kow424s1KBU9f0fdQa+Q==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api": "^1.3.0", + "fast-deep-equal": "^3.1.1", + "functional-red-black-tree": "^1.0.1", + "google-gax": "^4.3.3", + "protobufjs": "^7.2.6" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/paginator": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz", + "integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==", + "dev": true, + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/projectify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz", + "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/promisify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz", + "integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.14.0.tgz", + "integrity": "sha512-H41bPL2cMfSi4EEnFzKvg7XSb7T67ocSXrmF7MPjfgFB0L6CKGzfIYJheAZi1iqXjz6XaCT1OBf6HCG5vDBTOQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@google-cloud/paginator": "^5.0.0", + "@google-cloud/projectify": "^4.0.0", + "@google-cloud/promisify": "^4.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "duplexify": "^4.1.3", + "fast-xml-parser": "^4.4.1", + "gaxios": "^6.0.2", + "google-auth-library": "^9.6.3", + "html-entities": "^2.5.2", + "mime": "^3.0.0", + "p-limit": "^3.0.1", + "retry-request": "^7.0.0", + "teeny-request": "^9.0.0", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage/node_modules/fast-xml-parser": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.0.tgz", + "integrity": "sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/@google-cloud/storage/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@grpc/grpc-js": { + "version": "1.12.2", + "resolved": 
"https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.2.tgz", + "integrity": "sha512-bgxdZmgTrJZX50OjyVwz3+mNEnCTNkh3cIqGPWVNeW9jX6bn1ZkU80uPd+67/ZpIJIjRQ9qaHCjhavyoWYxumg==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.7.13", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", + "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", + "license": "Apache-2.0", + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.13", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz", + "integrity": "sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": 
"https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", + "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", + "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "dev": true, + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/reporters": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.7.0", + "jest-config": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-resolve-dependencies": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-runtime": 
"^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "jest-watcher": "^29.7.0", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", + "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "dev": true, + "dependencies": { + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "dev": true, + "dependencies": { + "expect": "^29.7.0", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", + "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "dev": true, + "dependencies": { + "jest-get-type": "^29.6.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", + "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", + "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/types": "^29.6.3", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", + "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "dev": true, + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + 
"istanbul-reports": "^3.1.3", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, + "dependencies": { + "@jest/test-result": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": 
"^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/@mswjs/cookies": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@mswjs/cookies/-/cookies-1.1.0.tgz", + "integrity": "sha512-0ZcCVQxifZmhwNBoQIrystCb+2sWBY2Zw8lpfJBPCHGCA/HWqehITeCRVIv4VMy8MPlaHo2w2pTHFV2pFfqKPw==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/@mswjs/interceptors": { + "version": "0.25.13", + "resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.25.13.tgz", + "integrity": "sha512-xfjR81WwXPHwhDbqJRHlxYmboJuiSaIKpP4I5TJVFl/EmByOU13jOBT9hmEnxcjR3jvFYoqoNKt7MM9uqerj9A==", + "dev": true, + "dependencies": { + "@open-draft/deferred-promise": "^2.2.0", + "@open-draft/logger": "^0.3.0", + "@open-draft/until": "^2.0.0", + "is-node-process": "^1.2.0", + "outvariant": "^1.2.1", + "strict-event-emitter": "^0.5.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + 
"engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@octokit/app": { + "version": "14.0.2", + "resolved": "https://registry.npmjs.org/@octokit/app/-/app-14.0.2.tgz", + "integrity": "sha512-NCSCktSx+XmjuSUVn2dLfqQ9WIYePGP95SDJs4I9cn/0ZkeXcPkaoCLl64Us3dRKL2ozC7hArwze5Eu+/qt1tg==", + "dependencies": { + "@octokit/auth-app": "^6.0.0", + "@octokit/auth-unauthenticated": "^5.0.0", + "@octokit/core": "^5.0.0", + "@octokit/oauth-app": "^6.0.0", + "@octokit/plugin-paginate-rest": "^9.0.0", + "@octokit/types": "^12.0.0", + "@octokit/webhooks": "^12.0.4" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-app": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/@octokit/auth-app/-/auth-app-6.1.3.tgz", + "integrity": "sha512-dcaiteA6Y/beAlDLZOPNReN3FGHu+pARD6OHfh3T9f3EO09++ec+5wt3KtGGSSs2Mp5tI8fQwdMOEnrzBLfgUA==", + "dependencies": { + "@octokit/auth-oauth-app": "^7.1.0", + "@octokit/auth-oauth-user": "^4.1.0", + "@octokit/request": "^8.3.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "deprecation": "^2.3.1", + "lru-cache": "npm:@wolfy1339/lru-cache@^11.0.2-patch.1", + "universal-github-app-jwt": "^1.1.2", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-app/node_modules/@octokit/request": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz", + "integrity": "sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==", + "dependencies": { + "@octokit/endpoint": "^9.0.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-app/node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-app/node_modules/@octokit/types": { + "version": "13.6.1", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz", + "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@octokit/auth-app/node_modules/lru-cache": { + "name": "@wolfy1339/lru-cache", + "version": "11.0.2-patch.1", + "resolved": "https://registry.npmjs.org/@wolfy1339/lru-cache/-/lru-cache-11.0.2-patch.1.tgz", + "integrity": 
"sha512-BgYZfL2ADCXKOw2wJtkM3slhHotawWkgIRRxq4wEybnZQPjvAp71SPX35xepMykTw8gXlzWcWPTY31hlbnRsDA==" + }, + "node_modules/@octokit/auth-oauth-app": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-7.1.0.tgz", + "integrity": "sha512-w+SyJN/b0l/HEb4EOPRudo7uUOSW51jcK1jwLa+4r7PA8FPFpoxEnHBHMITqCsc/3Vo2qqFjgQfz/xUUvsSQnA==", + "dependencies": { + "@octokit/auth-oauth-device": "^6.1.0", + "@octokit/auth-oauth-user": "^4.1.0", + "@octokit/request": "^8.3.1", + "@octokit/types": "^13.0.0", + "@types/btoa-lite": "^1.0.0", + "btoa-lite": "^1.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/request": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz", + "integrity": "sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==", + "dependencies": { + "@octokit/endpoint": "^9.0.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/types": { + "version": "13.6.1", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz", + "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@octokit/auth-oauth-device": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-6.1.0.tgz", + "integrity": "sha512-FNQ7cb8kASufd6Ej4gnJ3f1QB5vJitkoV1O0/g6e6lUsQ7+VsSNRHRmFScN2tV4IgKA12frrr/cegUs0t+0/Lw==", + "dependencies": { + "@octokit/oauth-methods": "^4.1.0", + "@octokit/request": "^8.3.1", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/request": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz", + "integrity": "sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==", + "dependencies": { + "@octokit/endpoint": "^9.0.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/types": { + "version": "13.6.1", + "resolved": 
"https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz", + "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@octokit/auth-oauth-user": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-4.1.0.tgz", + "integrity": "sha512-FrEp8mtFuS/BrJyjpur+4GARteUCrPeR/tZJzD8YourzoVhRics7u7we/aDcKv+yywRNwNi/P4fRi631rG/OyQ==", + "dependencies": { + "@octokit/auth-oauth-device": "^6.1.0", + "@octokit/oauth-methods": "^4.1.0", + "@octokit/request": "^8.3.1", + "@octokit/types": "^13.0.0", + "btoa-lite": "^1.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/request": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz", + "integrity": "sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==", + "dependencies": { + "@octokit/endpoint": "^9.0.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/types": { + "version": "13.6.1", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz", + "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@octokit/auth-token": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-unauthenticated": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@octokit/auth-unauthenticated/-/auth-unauthenticated-5.0.1.tgz", + "integrity": "sha512-oxeWzmBFxWd+XolxKTc4zr+h3mt+yofn4r7OfoIkR/Cj/o70eEGmPsFbueyJE2iBAGpjgTnEOKM3pnuEGVmiqg==", + "dependencies": { + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/core": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.0.2.tgz", + "integrity": "sha512-cZUy1gUvd4vttMic7C0lwPed8IYXWYp8kHIMatyhY8t8n3Cpw2ILczkV5pGMPqef7v0bLo0pOHrEHarsau2Ydg==", + "dependencies": { + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.0.0", + "@octokit/request": "^8.0.2", + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/endpoint": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.5.tgz", + "integrity": 
"sha512-ekqR4/+PCLkEBF6qgj8WqJfvDq65RH85OAgrtnVp1mSxaXF03u2xW/hUdweGS5654IlC0wkNYC18Z50tSYTAFw==", + "dependencies": { + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/endpoint/node_modules/@octokit/types": { + "version": "13.6.1", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz", + "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@octokit/graphql": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.0.tgz", + "integrity": "sha512-r+oZUH7aMFui1ypZnAvZmn0KSqAUgE1/tUXIWaqUCa1758ts/Jio84GZuzsvUkme98kv0WFY8//n0J1Z+vsIsQ==", + "dependencies": { + "@octokit/request": "^8.3.0", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/graphql/node_modules/@octokit/request": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz", + "integrity": "sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==", + "dependencies": { + "@octokit/endpoint": "^9.0.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/graphql/node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/graphql/node_modules/@octokit/types": { + "version": "13.6.1", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz", + "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@octokit/oauth-app": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@octokit/oauth-app/-/oauth-app-6.1.0.tgz", + "integrity": "sha512-nIn/8eUJ/BKUVzxUXd5vpzl1rwaVxMyYbQkNZjHrF7Vk/yu98/YDF/N2KeWO7uZ0g3b5EyiFXFkZI8rJ+DH1/g==", + "dependencies": { + "@octokit/auth-oauth-app": "^7.0.0", + "@octokit/auth-oauth-user": "^4.0.0", + "@octokit/auth-unauthenticated": "^5.0.0", + "@octokit/core": "^5.0.0", + "@octokit/oauth-authorization-url": "^6.0.2", + "@octokit/oauth-methods": "^4.0.0", + "@types/aws-lambda": "^8.10.83", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/oauth-authorization-url": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-6.0.2.tgz", + "integrity": "sha512-CdoJukjXXxqLNK4y/VOiVzQVjibqoj/xHgInekviUJV73y/BSIcwvJ/4aNHPBPKcPWFnd4/lO9uqRV65jXhcLA==", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/oauth-methods": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@octokit/oauth-methods/-/oauth-methods-4.1.0.tgz", + "integrity": "sha512-4tuKnCRecJ6CG6gr0XcEXdZtkTDbfbnD5oaHBmLERTjTMZNi2CbfEHZxPU41xXLDG4DfKf+sonu00zvKI9NSbw==", + "dependencies": { + 
"@octokit/oauth-authorization-url": "^6.0.2", + "@octokit/request": "^8.3.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.0.0", + "btoa-lite": "^1.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/oauth-methods/node_modules/@octokit/request": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz", + "integrity": "sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==", + "dependencies": { + "@octokit/endpoint": "^9.0.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/oauth-methods/node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/oauth-methods/node_modules/@octokit/types": { + "version": "13.6.1", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz", + "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", + "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" + }, + "node_modules/@octokit/plugin-paginate-graphql": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-4.0.0.tgz", + "integrity": "sha512-7HcYW5tP7/Z6AETAPU14gp5H5KmCPT3hmJrS/5tO7HIgbwenYmgw4OY9Ma54FDySuxMwD+wsJlxtuGWwuZuItA==", + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=5" + } + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "9.1.5", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.1.5.tgz", + "integrity": "sha512-WKTQXxK+bu49qzwv4qKbMMRXej1DU2gq017euWyKVudA6MldaSSQuxtz+vGbhxV4CjxpUxjZu6rM2wfc1FiWVg==", + "dependencies": { + "@octokit/types": "^12.4.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=5" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.2.0.tgz", + "integrity": "sha512-ePbgBMYtGoRNXDyKGvr9cyHjQ163PbwD0y1MkDJCpkO2YH4OeXX40c4wYHKikHGZcpGPbcRLuy0unPUuafco8Q==", + "dependencies": { + "@octokit/types": "^12.3.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=5" + } + }, + "node_modules/@octokit/plugin-retry": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz", + "integrity": "sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==", + "dependencies": { + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 18" + }, + 
"peerDependencies": { + "@octokit/core": ">=5" + } + }, + "node_modules/@octokit/plugin-throttling": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-throttling/-/plugin-throttling-8.1.3.tgz", + "integrity": "sha512-pfyqaqpc0EXh5Cn4HX9lWYsZ4gGbjnSmUILeu4u2gnuM50K/wIk9s1Pxt3lVeVwekmITgN/nJdoh43Ka+vye8A==", + "dependencies": { + "@octokit/types": "^12.2.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "^5.0.0" + } + }, + "node_modules/@octokit/request": { + "version": "8.1.6", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.1.6.tgz", + "integrity": "sha512-YhPaGml3ncZC1NfXpP3WZ7iliL1ap6tLkAp6MvbK2fTTPytzVUyUesBBogcdMm86uRYO5rHaM1xIWxigWZ17MQ==", + "dependencies": { + "@octokit/endpoint": "^9.0.0", + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/request-error": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.0.1.tgz", + "integrity": "sha512-X7pnyTMV7MgtGmiXBwmO6M5kIPrntOXdyKZLigNfQWSEQzVxR4a4vo49vJjTWX70mPndj8KhfT4Dx+2Ng3vnBQ==", + "dependencies": { + "@octokit/types": "^12.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/types": { + "version": "12.4.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.4.0.tgz", + "integrity": "sha512-FLWs/AvZllw/AGVs+nJ+ELCDZZJk+kY0zMen118xhL2zD0s1etIUHm1odgjP7epxYU1ln7SZxEUWYop5bhsdgQ==", + "dependencies": { + "@octokit/openapi-types": "^19.1.0" + } + }, + "node_modules/@octokit/types/node_modules/@octokit/openapi-types": { + "version": "19.1.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-19.1.0.tgz", + "integrity": "sha512-6G+ywGClliGQwRsjvqVYpklIfa7oRPA0vyhPQG/1Feh+B+wU0vGH1JiJ5T25d3g1JZYBHzR2qefLi9x8Gt+cpw==" + }, + "node_modules/@octokit/webhooks": { + "version": "12.3.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-12.3.1.tgz", + "integrity": "sha512-BVwtWE3rRXB9IugmQTfKspqjNa8q+ab73ddkV9k1Zok3XbuOxJUi4lTYk5zBZDhfWb/Y2H+RO9Iggm25gsqeow==", + "dependencies": { + "@octokit/request-error": "^5.0.0", + "@octokit/webhooks-methods": "^4.1.0", + "@octokit/webhooks-types": "7.6.1", + "aggregate-error": "^3.1.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/webhooks-methods": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-4.1.0.tgz", + "integrity": "sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ==", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/webhooks-types": { + "version": "7.6.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.6.1.tgz", + "integrity": "sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw==" + }, + "node_modules/@open-draft/deferred-promise": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz", + "integrity": "sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==", + "dev": true + }, + "node_modules/@open-draft/logger": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@open-draft/logger/-/logger-0.3.0.tgz", + 
"integrity": "sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==", + "dev": true, + "dependencies": { + "is-node-process": "^1.2.0", + "outvariant": "^1.4.0" + } + }, + "node_modules/@open-draft/until": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@open-draft/until/-/until-2.1.0.tgz", + "integrity": "sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==", + "dev": true + }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "license": "Apache-2.0", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", + "license": "BSD-3-Clause" + }, + 
"node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", + "license": "BSD-3-Clause" + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@smithy/abort-controller": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-2.2.0.tgz", + "integrity": "sha512-wRlta7GuLWpTqtFfGo+nZyOO1vEvewdNR1R4rTxpC8XU6vG/NDyrFBhwLZsqg1NUoR1noVaXJPC/7ZK47QCySw==", + "dependencies": { + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/abort-controller/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/chunked-blob-reader": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/chunked-blob-reader/-/chunked-blob-reader-2.2.0.tgz", + "integrity": "sha512-3GJNvRwXBGdkDZZOGiziVYzDpn4j6zfyULHMDKAGIUo72yHALpE9CbhfQp/XcLNVoc1byfMpn6uW5H2BqPjgaQ==", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/chunked-blob-reader-native": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-2.2.0.tgz", + "integrity": "sha512-VNB5+1oCgX3Fzs072yuRsUoC2N4Zg/LJ11DTxX3+Qu+Paa6AmbIF0E9sc2wthz9Psrk/zcOlTCyuposlIhPjZQ==", + "dependencies": { + "@smithy/util-base64": "^2.3.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/chunked-blob-reader-native/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/chunked-blob-reader-native/node_modules/@smithy/util-base64": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-2.3.0.tgz", + "integrity": "sha512-s3+eVwNeJuXUwuMbusncZNViuhv2LjVJ1nMwTqSA0XAC7gjKhqqxRdJPhR8+YrkoZ9IiIbFk/yK6ACe/xlF+hw==", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "@smithy/util-utf8": "^2.3.0", + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/chunked-blob-reader-native/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/chunked-blob-reader-native/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "2.0.21", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-2.0.21.tgz", + "integrity": "sha512-rlLIGT+BeqjnA6C2FWumPRJS1UW07iU5ZxDHtFuyam4W65gIaOFMjkB90ofKCIh+0mLVQrQFrl/VLtQT/6FWTA==", + "dependencies": { + "@smithy/node-config-provider": "^2.1.8", + "@smithy/types": "^2.7.0", + "@smithy/util-config-provider": "^2.0.0", + "@smithy/util-middleware": "^2.0.8", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-1.4.2.tgz", + "integrity": "sha512-2fek3I0KZHWJlRLvRTqxTEri+qV0GRHrJIoLFuBMZB4EMg4WgeBGfF0X6abnrNYpq55KJ6R4D6x4f0vLnhzinA==", + "dependencies": { + "@smithy/middleware-endpoint": "^2.5.1", + "@smithy/middleware-retry": "^2.3.1", + "@smithy/middleware-serde": "^2.3.0", + "@smithy/protocol-http": "^3.3.0", + "@smithy/smithy-client": "^2.5.1", + "@smithy/types": "^2.12.0", + "@smithy/util-middleware": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/fetch-http-handler": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-2.5.0.tgz", + "integrity": "sha512-BOWEBeppWhLn/no/JxUL/ghTfANTjT7kg3Ww2rPqTUY9R4yHPXxJ9JhMe3Z03LN3aPwiwlpDIUcVw1xDyHqEhw==", + "dependencies": { + "@smithy/protocol-http": "^3.3.0", + "@smithy/querystring-builder": "^2.2.0", + "@smithy/types": "^2.12.0", + "@smithy/util-base64": "^2.3.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/middleware-endpoint": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-2.5.1.tgz", + "integrity": "sha512-1/8kFp6Fl4OsSIVTWHnNjLnTL8IqpIb/D3sTSczrKFnrE9VMNWxnrRKNvpUHOJ6zpGD5f62TPm7+17ilTJpiCQ==", + "dependencies": { + "@smithy/middleware-serde": "^2.3.0", + "@smithy/node-config-provider": "^2.3.0", + "@smithy/shared-ini-file-loader": "^2.4.0", + "@smithy/types": "^2.12.0", + "@smithy/url-parser": "^2.2.0", + "@smithy/util-middleware": "^2.2.0", + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/middleware-retry": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-2.3.1.tgz", + "integrity": "sha512-P2bGufFpFdYcWvqpyqqmalRtwFUNUA8vHjJR5iGqbfR6mp65qKOLcUd6lTr4S9Gn/enynSrSf3p3FVgVAf6bXA==", + "dependencies": { + "@smithy/node-config-provider": "^2.3.0", + "@smithy/protocol-http": "^3.3.0", + "@smithy/service-error-classification": "^2.1.5", + "@smithy/smithy-client": "^2.5.1", + "@smithy/types": "^2.12.0", + "@smithy/util-middleware": "^2.2.0", + "@smithy/util-retry": "^2.2.0", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/middleware-serde": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-2.3.0.tgz", + "integrity": "sha512-sIADe7ojwqTyvEQBe1nc/GXB9wdHhi9UwyX0lTyttmUWDJLP655ZYE1WngnNyXREme8I27KCaUhyhZWRXL0q7Q==", + "dependencies": { + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/middleware-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-2.2.0.tgz", + "integrity": "sha512-Qntc3jrtwwrsAC+X8wms8zhrTr0sFXnyEGhZd9sLtsJ/6gGQKFzNB+wWbOcpJd7BR8ThNCoKt76BuQahfMvpeA==", + "dependencies": { + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/node-config-provider": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-2.3.0.tgz", + "integrity": "sha512-0elK5/03a1JPWMDPaS726Iw6LpQg80gFut1tNpPfxFuChEEklo2yL823V94SpTZTxmKlXFtFgsP55uh3dErnIg==", + "dependencies": { + "@smithy/property-provider": "^2.2.0", + "@smithy/shared-ini-file-loader": "^2.4.0", + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/node-http-handler": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-2.5.0.tgz", + "integrity": "sha512-mVGyPBzkkGQsPoxQUbxlEfRjrj6FPyA3u3u2VXGr9hT8wilsoQdZdvKpMBFMB8Crfhv5dNkKHIW0Yyuc7eABqA==", + "dependencies": { + "@smithy/abort-controller": "^2.2.0", + "@smithy/protocol-http": "^3.3.0", + "@smithy/querystring-builder": "^2.2.0", + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/protocol-http": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-3.3.0.tgz", + "integrity": "sha512-Xy5XK1AFWW2nlY/biWZXu6/krgbaf2dg0q492D8M5qthsnU2H+UgFeZLbM76FnH7s6RO/xhQRkj+T6KBO3JzgQ==", + "dependencies": { + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/smithy-client": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-2.5.1.tgz", + "integrity": "sha512-jrbSQrYCho0yDaaf92qWgd+7nAeap5LtHTI51KXqmpIFCceKU3K9+vIVTUH72bOJngBMqa4kyu1VJhRcSrk/CQ==", + "dependencies": { + "@smithy/middleware-endpoint": "^2.5.1", + "@smithy/middleware-stack": "^2.2.0", + "@smithy/protocol-http": "^3.3.0", + "@smithy/types": 
"^2.12.0", + "@smithy/util-stream": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/url-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-2.2.0.tgz", + "integrity": "sha512-hoA4zm61q1mNTpksiSWp2nEl1dt3j726HdRhiNgVJQMj7mLp7dprtF57mOB6JvEk/x9d2bsuL5hlqZbBuHQylQ==", + "dependencies": { + "@smithy/querystring-parser": "^2.2.0", + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/util-base64": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-2.3.0.tgz", + "integrity": "sha512-s3+eVwNeJuXUwuMbusncZNViuhv2LjVJ1nMwTqSA0XAC7gjKhqqxRdJPhR8+YrkoZ9IiIbFk/yK6ACe/xlF+hw==", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "@smithy/util-utf8": "^2.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/util-hex-encoding": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-2.2.0.tgz", + "integrity": "sha512-7iKXR+/4TpLK194pVjKiasIyqMtTYJsgKgM242Y9uzt5dhHnUDvMNb+3xIhRJ9QhvqGii/5cRUt4fJn3dtXNHQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/util-middleware": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-2.2.0.tgz", + "integrity": "sha512-L1qpleXf9QD6LwLCJ5jddGkgWyuSvWBkJwWAZ6kFkdifdso+sk3L3O1HdmPvCdnCK3IS4qWyPxev01QMnfHSBw==", + "dependencies": { + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/util-retry": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-2.2.0.tgz", + "integrity": "sha512-q9+pAFPTfftHXRytmZ7GzLFFrEGavqapFc06XxzZFcSIGERXMerXxCitjOG1prVDR9QdjqotF40SWvbqcCpf8g==", + "dependencies": { + "@smithy/service-error-classification": "^2.1.5", + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/util-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-2.2.0.tgz", + "integrity": "sha512-17faEXbYWIRst1aU9SvPZyMdWmqIrduZjVOqCPMIsWFNxs5yQQgFrJL6b2SdiCzyW9mJoDjFtgi53xx7EH+BXA==", + "dependencies": { + "@smithy/fetch-http-handler": "^2.5.0", + "@smithy/node-http-handler": "^2.5.0", + "@smithy/types": "^2.12.0", + "@smithy/util-base64": "^2.3.0", + "@smithy/util-buffer-from": "^2.2.0", + 
"@smithy/util-hex-encoding": "^2.2.0", + "@smithy/util-utf8": "^2.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-2.3.0.tgz", + "integrity": "sha512-BWB9mIukO1wjEOo1Ojgl6LrG4avcaC7T/ZP6ptmAaW4xluhSIPZhY+/PI5YKzlk+jsm+4sQZB45Bt1OfMeQa3w==", + "dependencies": { + "@smithy/node-config-provider": "^2.3.0", + "@smithy/property-provider": "^2.2.0", + "@smithy/types": "^2.12.0", + "@smithy/url-parser": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds/node_modules/@smithy/node-config-provider": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-2.3.0.tgz", + "integrity": "sha512-0elK5/03a1JPWMDPaS726Iw6LpQg80gFut1tNpPfxFuChEEklo2yL823V94SpTZTxmKlXFtFgsP55uh3dErnIg==", + "dependencies": { + "@smithy/property-provider": "^2.2.0", + "@smithy/shared-ini-file-loader": "^2.4.0", + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds/node_modules/@smithy/url-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-2.2.0.tgz", + "integrity": "sha512-hoA4zm61q1mNTpksiSWp2nEl1dt3j726HdRhiNgVJQMj7mLp7dprtF57mOB6JvEk/x9d2bsuL5hlqZbBuHQylQ==", + "dependencies": { + "@smithy/querystring-parser": "^2.2.0", + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/eventstream-codec": { + "version": "2.0.15", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-2.0.15.tgz", + "integrity": "sha512-crjvz3j1gGPwA0us6cwS7+5gAn35CTmqu/oIxVbYJo2Qm/sGAye6zGJnMDk3BKhWZw5kcU1G4MxciTkuBpOZPg==", + "dependencies": { + "@aws-crypto/crc32": "3.0.0", + "@smithy/types": "^2.7.0", + "@smithy/util-hex-encoding": "^2.0.0", + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/eventstream-serde-browser": { + "version": "2.0.15", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-2.0.15.tgz", + "integrity": "sha512-WiFG5N9j3jmS5P0z5Xev6dO0c3lf7EJYC2Ncb0xDnWFvShwXNn741AF71ABr5EcZw8F4rQma0362MMjAwJeZog==", + "dependencies": { + "@smithy/eventstream-serde-universal": "^2.0.15", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-config-resolver": { + "version": "2.0.15", + "resolved": 
"https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-2.0.15.tgz", + "integrity": "sha512-o65d2LRjgCbWYH+VVNlWXtmsI231SO99ZTOL4UuIPa6WTjbSHWtlXvUcJG9libhEKWmEV9DIUiH2IqyPWi7ubA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-node": { + "version": "2.0.15", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-2.0.15.tgz", + "integrity": "sha512-9OOXiIhHq1VeOG6xdHkn2ZayfMYM3vzdUTV3zhcCnt+tMqA3BJK3XXTJFRR2BV28rtRM778DzqbBTf+hqwQPTg==", + "dependencies": { + "@smithy/eventstream-serde-universal": "^2.0.15", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-universal": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-2.2.0.tgz", + "integrity": "sha512-pvoe/vvJY0mOpuF84BEtyZoYfbehiFj8KKWk1ds2AT0mTLYFVs+7sBJZmioOFdBXKd48lfrx1vumdPdmGlCLxA==", + "dependencies": { + "@smithy/eventstream-codec": "^2.2.0", + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-universal/node_modules/@smithy/eventstream-codec": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-2.2.0.tgz", + "integrity": "sha512-8janZoJw85nJmQZc4L8TuePp2pk1nxLgkxIR0TUjKJ5Dkj5oelB9WtiSSGXCQvNsJl0VSTvK/2ueMXxvpa9GVw==", + "dependencies": { + "@aws-crypto/crc32": "3.0.0", + "@smithy/types": "^2.12.0", + "@smithy/util-hex-encoding": "^2.2.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/eventstream-serde-universal/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-universal/node_modules/@smithy/util-hex-encoding": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-2.2.0.tgz", + "integrity": "sha512-7iKXR+/4TpLK194pVjKiasIyqMtTYJsgKgM242Y9uzt5dhHnUDvMNb+3xIhRJ9QhvqGii/5cRUt4fJn3dtXNHQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-2.3.1.tgz", + "integrity": "sha512-6MNk16fqb8EwcYY8O8WxB3ArFkLZ2XppsSNo1h7SQcFdDDwIumiJeO6wRzm7iB68xvsOQzsdQKbdtTieS3hfSQ==", + "dependencies": { + "@smithy/protocol-http": "^3.0.11", + "@smithy/querystring-builder": "^2.0.15", + "@smithy/types": "^2.7.0", + "@smithy/util-base64": "^2.0.1", + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/hash-blob-browser": { + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-2.0.16.tgz", + "integrity": "sha512-cSYRi05LA7DZDwjB1HL0BP8B56eUNNeLglVH147QTXFyuXJq/7erAIiLRfsyXB8+GfFHkSS5BHbc76a7k/AYPA==", + "dependencies": { + "@smithy/chunked-blob-reader": "^2.0.0", + "@smithy/chunked-blob-reader-native": "^2.0.1", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + } + }, + 
"node_modules/@smithy/hash-node": { + "version": "2.0.17", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-2.0.17.tgz", + "integrity": "sha512-Il6WuBcI1nD+e2DM7tTADMf01wEPGK8PAhz4D+YmDUVaoBqlA+CaH2uDJhiySifmuKBZj748IfygXty81znKhw==", + "dependencies": { + "@smithy/types": "^2.7.0", + "@smithy/util-buffer-from": "^2.0.0", + "@smithy/util-utf8": "^2.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/hash-stream-node": { + "version": "2.0.17", + "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-2.0.17.tgz", + "integrity": "sha512-ey8DtnATzp1mOXgS7rqMwSmAki6iJA+jgNucKcxRkhMB1rrICfHg+rhmIF50iLPDHUhTcS5pBMOrLzzpZftvNQ==", + "dependencies": { + "@smithy/types": "^2.7.0", + "@smithy/util-utf8": "^2.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "2.0.15", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-2.0.15.tgz", + "integrity": "sha512-dlEKBFFwVfzA5QroHlBS94NpgYjXhwN/bFfun+7w3rgxNvVy79SK0w05iGc7UAeC5t+D7gBxrzdnD6hreZnDVQ==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz", + "integrity": "sha512-z3PjFjMyZNI98JFRJi/U0nGoLWMSJlDjAW4QUX2WNZLas5C0CmVV6LJ01JI0k90l7FvpmixjWxPFmENSClQ7ug==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/md5-js": { + "version": "2.0.17", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-2.0.17.tgz", + "integrity": "sha512-jmISTCnEkOnm2oCNx/rMkvBT/eQh3aA6nktevkzbmn/VYqYEuc5Z2n5sTTqsciMSO01Lvf56wG1A4twDqovYeQ==", + "dependencies": { + "@smithy/types": "^2.7.0", + "@smithy/util-utf8": "^2.0.2", + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "2.0.17", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-2.0.17.tgz", + "integrity": "sha512-OyadvMcKC7lFXTNBa8/foEv7jOaqshQZkjWS9coEXPRZnNnihU/Ls+8ZuJwGNCOrN2WxXZFmDWhegbnM4vak8w==", + "dependencies": { + "@smithy/protocol-http": "^3.0.11", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-2.2.3.tgz", + "integrity": "sha512-nYfxuq0S/xoAjdLbyn1ixeVB6cyH9wYCMtbbOCpcCRYR5u2mMtqUtVjjPAZ/DIdlK3qe0tpB0Q76szFGNuz+kQ==", + "dependencies": { + "@smithy/middleware-serde": "^2.0.15", + "@smithy/node-config-provider": "^2.1.8", + "@smithy/shared-ini-file-loader": "^2.2.7", + "@smithy/types": "^2.7.0", + "@smithy/url-parser": "^2.0.15", + "@smithy/util-middleware": "^2.0.8", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "2.0.24", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-2.0.24.tgz", + "integrity": "sha512-q2SvHTYu96N7lYrn3VSuX3vRpxXHR/Cig6MJpGWxd0BWodUQUWlKvXpWQZA+lTaFJU7tUvpKhRd4p4MU3PbeJg==", + "dependencies": { + "@smithy/node-config-provider": "^2.1.8", + "@smithy/protocol-http": "^3.0.11", + "@smithy/service-error-classification": "^2.0.8", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", 
+ "@smithy/util-middleware": "^2.0.8", + "@smithy/util-retry": "^2.0.8", + "tslib": "^2.5.0", + "uuid": "^8.3.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/middleware-retry/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "2.0.15", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-2.0.15.tgz", + "integrity": "sha512-FOZRFk/zN4AT4wzGuBY+39XWe+ZnCFd0gZtyw3f9Okn2CJPixl9GyWe98TIaljeZdqWkgrzGyPre20AcW2UMHQ==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "2.0.9", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-2.0.9.tgz", + "integrity": "sha512-bCB5dUtGQ5wh7QNL2ELxmDc6g7ih7jWU3Kx6MYH1h4mZbv9xL3WyhKHojRltThCB1arLPyTUFDi+x6fB/oabtA==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-2.1.8.tgz", + "integrity": "sha512-+w26OKakaBUGp+UG+dxYZtFb5fs3tgHg3/QrRrmUZj+rl3cIuw840vFUXX35cVPTUCQIiTqmz7CpVF7+hdINdQ==", + "dependencies": { + "@smithy/property-provider": "^2.0.16", + "@smithy/shared-ini-file-loader": "^2.2.7", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-2.2.1.tgz", + "integrity": "sha512-8iAKQrC8+VFHPAT8pg4/j6hlsTQh+NKOWlctJBrYtQa4ExcxX7aSg3vdQ2XLoYwJotFUurg/NLqFCmZaPRrogw==", + "dependencies": { + "@smithy/abort-controller": "^2.0.15", + "@smithy/protocol-http": "^3.0.11", + "@smithy/querystring-builder": "^2.0.15", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-2.2.0.tgz", + "integrity": "sha512-+xiil2lFhtTRzXkx8F053AV46QnIw6e7MV8od5Mi68E1ICOjCeCHw2XfLnDEUHnT9WGUIkwcqavXjfwuJbGlpg==", + "dependencies": { + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/property-provider/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "3.0.11", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-3.0.11.tgz", + "integrity": "sha512-3ziB8fHuXIRamV/akp/sqiWmNPR6X+9SB8Xxnozzj+Nq7hSpyKdFHd1FLpBkgfGFUTzzcBJQlDZPSyxzmdcx5A==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-2.2.0.tgz", + "integrity": "sha512-L1kSeviUWL+emq3CUVSgdogoM/D9QMFaqxL/dd0X7PCNWmPXqt+ExtrBjqT0V7HLN03Vs9SuiLrG3zy3JGnE5A==", + "dependencies": { + "@smithy/types": "^2.12.0", + "@smithy/util-uri-escape": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/querystring-builder/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-2.2.0.tgz", + "integrity": "sha512-BvHCDrKfbG5Yhbpj4vsbuPV2GgcpHiAkLeIlcA1LtfpMz3jrqizP1+OguSNSj1MwBHEiN+jwNisXLGdajGDQJA==", + "dependencies": { + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/querystring-parser/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-2.1.5.tgz", + "integrity": "sha512-uBDTIBBEdAQryvHdc5W8sS5YX7RQzF683XrHePVdFmAgKiMofU15FLSM0/HU03hKTnazdNRFa0YHS7+ArwoUSQ==", + "dependencies": { + "@smithy/types": "^2.12.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/service-error-classification/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-2.4.0.tgz", + "integrity": "sha512-WyujUJL8e1B6Z4PBfAqC/aGY1+C7T0w20Gih3yrvJSk97gpiVfB+y7c46T4Nunk+ZngLq0rOIdeVeIklk0R3OA==", + "dependencies": { + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-2.0.18.tgz", + "integrity": "sha512-SJRAj9jT/l9ocm8D0GojMbnA1sp7I4JeStOQ4lEXI8A5eHE73vbjlzlqIFB7cLvIgau0oUl4cGVpF9IGCrvjlw==", + "dependencies": { + "@smithy/eventstream-codec": "^2.0.15", + "@smithy/is-array-buffer": "^2.0.0", + "@smithy/types": "^2.7.0", + 
"@smithy/util-hex-encoding": "^2.0.0", + "@smithy/util-middleware": "^2.0.8", + "@smithy/util-uri-escape": "^2.0.0", + "@smithy/util-utf8": "^2.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "2.1.18", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-2.1.18.tgz", + "integrity": "sha512-7FqdbaJiVaHJDD9IfDhmzhSDbpjyx+ZsfdYuOpDJF09rl8qlIAIlZNoSaflKrQ3cEXZN2YxGPaNWGhbYimyIRQ==", + "dependencies": { + "@smithy/middleware-stack": "^2.0.9", + "@smithy/types": "^2.7.0", + "@smithy/util-stream": "^2.0.23", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.7.0.tgz", + "integrity": "sha512-1OIFyhK+vOkMbu4aN2HZz/MomREkrAC/HqY5mlJMUJfGrPRwijJDTeiN8Rnj9zUaB8ogXAfIOtZrrgqZ4w7Wnw==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "2.0.15", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-2.0.15.tgz", + "integrity": "sha512-sADUncUj9rNbOTrdDGm4EXlUs0eQ9dyEo+V74PJoULY4jSQxS+9gwEgsPYyiu8PUOv16JC/MpHonOgqP/IEDZA==", + "dependencies": { + "@smithy/querystring-parser": "^2.0.15", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-2.0.1.tgz", + "integrity": "sha512-DlI6XFYDMsIVN+GH9JtcRp3j02JEVuWIn/QOZisVzpIAprdsxGveFed0bjbMRCqmIFe8uetn5rxzNrBtIGrPIQ==", + "dependencies": { + "@smithy/util-buffer-from": "^2.0.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-2.0.1.tgz", + "integrity": "sha512-NXYp3ttgUlwkaug4bjBzJ5+yIbUbUx8VsSLuHZROQpoik+gRkIBeEG9MPVYfvPNpuXb/puqodeeUXcKFe7BLOQ==", + "dependencies": { + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-2.1.0.tgz", + "integrity": "sha512-/li0/kj/y3fQ3vyzn36NTLGmUwAICb7Jbe/CsWCktW363gh1MOcpEcSO3mJ344Gv2dqz8YJCLQpb6hju/0qOWw==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz", + "integrity": "sha512-/YNnLoHsR+4W4Vf2wL5lGv0ksg8Bmk3GEGxn2vEQt52AQaPSCuaO5PM5VM7lP1K9qHRKHwrPGktqVoAHKWHxzw==", + "dependencies": { + "@smithy/is-array-buffer": "^2.0.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-2.0.0.tgz", + "integrity": "sha512-xCQ6UapcIWKxXHEU4Mcs2s7LcFQRiU3XEluM2WcCjjBtQkUN71Tb+ydGmJFPxMUrW/GWMgQEEGipLym4XG0jZg==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-browser": { + "version": "2.0.22", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-2.0.22.tgz", + "integrity": 
"sha512-qcF20IHHH96FlktvBRICDXDhLPtpVmtksHmqNGtotb9B0DYWXsC6jWXrkhrrwF7tH26nj+npVTqh9isiFV1gdA==", + "dependencies": { + "@smithy/property-provider": "^2.0.16", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", + "bowser": "^2.11.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "2.0.29", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-2.0.29.tgz", + "integrity": "sha512-+uG/15VoUh6JV2fdY9CM++vnSuMQ1VKZ6BdnkUM7R++C/vLjnlg+ToiSR1FqKZbMmKBXmsr8c/TsDWMAYvxbxQ==", + "dependencies": { + "@smithy/config-resolver": "^2.0.21", + "@smithy/credential-provider-imds": "^2.1.4", + "@smithy/node-config-provider": "^2.1.8", + "@smithy/property-provider": "^2.0.16", + "@smithy/smithy-client": "^2.1.18", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-1.0.7.tgz", + "integrity": "sha512-Q2gEind3jxoLk6hdKWyESMU7LnXz8aamVwM+VeVjOYzYT1PalGlY/ETa48hv2YpV4+YV604y93YngyzzzQ4IIA==", + "dependencies": { + "@smithy/node-config-provider": "^2.1.8", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-2.0.0.tgz", + "integrity": "sha512-c5xY+NUnFqG6d7HFh1IFfrm3mGl29lC+vF+geHv4ToiuJCBmIfzx6IeHLg+OgRdPFKDXIw6pvi+p3CsscaMcMA==", + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-2.0.8.tgz", + "integrity": "sha512-qkvqQjM8fRGGA8P2ydWylMhenCDP8VlkPn8kiNuFEaFz9xnUKC2irfqsBSJrfrOB9Qt6pQsI58r3zvvumhFMkw==", + "dependencies": { + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-2.0.8.tgz", + "integrity": "sha512-cQTPnVaVFMjjS6cb44WV2yXtHVyXDC5icKyIbejMarJEApYeJWpBU3LINTxHqp/tyLI+MZOUdosr2mZ3sdziNg==", + "dependencies": { + "@smithy/service-error-classification": "^2.0.8", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "2.0.23", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-2.0.23.tgz", + "integrity": "sha512-OJMWq99LAZJUzUwTk+00plyxX3ESktBaGPhqNIEVab+53gLULiWN9B/8bRABLg0K6R6Xg4t80uRdhk3B/LZqMQ==", + "dependencies": { + "@smithy/fetch-http-handler": "^2.3.1", + "@smithy/node-http-handler": "^2.2.1", + "@smithy/types": "^2.7.0", + "@smithy/util-base64": "^2.0.1", + "@smithy/util-buffer-from": "^2.0.0", + "@smithy/util-hex-encoding": "^2.0.0", + "@smithy/util-utf8": "^2.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-2.2.0.tgz", + "integrity": "sha512-jtmJMyt1xMD/d8OtbVJ2gFZOSKc+ueYJZPW20ULW1GOp/q/YIM0wNh+u8ZFao9UaIGz4WoPW8hC64qlWLIfoDA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + 
"node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-utf8": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.0.2.tgz", + "integrity": "sha512-qOiVORSPm6Ce4/Yu6hbSgNHABLP2VMv8QOC3tTDNHHlWY19pPyc++fBTbZPtx6egPXi4HQxKDnMxVxpbtX2GoA==", + "dependencies": { + "@smithy/util-buffer-from": "^2.0.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "2.0.15", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-2.0.15.tgz", + "integrity": "sha512-9Y+btzzB7MhLADW7xgD6SjvmoYaRkrb/9SCbNGmNdfO47v38rxb90IGXyDtAK0Shl9bMthTmLgjlfYc+vtz2Qw==", + "dependencies": { + "@smithy/abort-controller": "^2.0.15", + "@smithy/types": "^2.7.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", + "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", + "dev": true + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true + }, + "node_modules/@types/aws-lambda": { + "version": "8.10.145", + "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.145.tgz", + "integrity": "sha512-dtByW6WiFk5W5Jfgz1VM+YPA21xMXTuSFoLYIDY0L44jDLLflVPtZkYuu3/YxpGcvjzKFBZLU+GyKjR0HOYtyw==" + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.6.8", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz", + "integrity": "sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": 
"sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.6", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", + "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/btoa-lite": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/btoa-lite/-/btoa-lite-1.0.2.tgz", + "integrity": "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg==" + }, + "node_modules/@types/caseless": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", + "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==" + }, + "node_modules/@types/cookie": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.4.1.tgz", + "integrity": "sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q==", + "dev": true + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "dev": true, + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/js-levenshtein": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/js-levenshtein/-/js-levenshtein-1.1.1.tgz", + "integrity": "sha512-qC4bCqYGy1y/NP7dDVr7KJarn+PbX1nSpwA7JXdu0HxT3QYjO8MJ+cntENtHFVy2dRAyBV23OZ6MxsW1AM1L8g==", + "dev": true + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": 
"sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", + "dev": true + }, + "node_modules/@types/jsonwebtoken": { + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.7.tgz", + "integrity": "sha512-ugo316mmTYBl2g81zDFnZ7cfxlut3o+/EQdaP7J8QN2kY6lJ22hmQYCK5EHcJHbrW+dkCGSCPgbG8JtYj6qSrg==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/long": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", + "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "18.19.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.3.tgz", + "integrity": "sha512-k5fggr14DwAytoA/t8rPrIz++lXK7/DqckthCmoZOKNsEbJkId4Z//BqgApXBUGrGddrigYa1oqheo/7YmW4rg==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/request": { + "version": "2.48.12", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.12.tgz", + "integrity": "sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw==", + "dependencies": { + "@types/caseless": "*", + "@types/node": "*", + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + } + }, + "node_modules/@types/semver": { + "version": "7.5.6", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz", + "integrity": "sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A==", + "dev": true + }, + "node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true + }, + "node_modules/@types/statuses": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/statuses/-/statuses-2.0.4.tgz", + "integrity": "sha512-eqNDvZsCNY49OAXB0Firg/Sc2BgoWsntsLUdybGFOhAfCD6QJ2n9HXUIHGqt5qjrxmMv4wS8WLAw43ZkKcJ8Pw==", + "dev": true + }, + "node_modules/@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==" + }, + "node_modules/@types/uuid": { + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.7.tgz", + "integrity": "sha512-WUtIVRUZ9i5dYXefDEAI7sh9/O7jGvHg7Df/5O/gtH3Yabe5odI3UWopVR1qbPXQtvOxWu3mM4XxlYeZtMWF4g==", + "dev": true + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": 
"sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.14.0.tgz", + "integrity": "sha512-1ZJBykBCXaSHG94vMMKmiHoL0MhNHKSVlcHVYZNw+BKxufhqQVTOawNpwwI1P5nIFZ/4jLVop0mcY6mJJDFNaw==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.5.1", + "@typescript-eslint/scope-manager": "6.14.0", + "@typescript-eslint/type-utils": "6.14.0", + "@typescript-eslint/utils": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", + "debug": "^4.3.4", + "graphemer": "^1.4.0", + "ignore": "^5.2.4", + "natural-compare": "^1.4.0", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@typescript-eslint/parser": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.14.0.tgz", + "integrity": "sha512-QjToC14CKacd4Pa7JK4GeB/vHmWFJckec49FR4hmIRf97+KXole0T97xxu9IFiPxVQ1DBWrQ5wreLwAGwWAVQA==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "6.14.0", + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/typescript-estree": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.14.0.tgz", + "integrity": "sha512-VT7CFWHbZipPncAZtuALr9y3EuzY1b1t1AEkIq2bTXUPKw+pHoXflGNG5L+Gv6nKul1cz1VH8fz16IThIU0tdg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0" + }, + "engines": { + "node": 
"^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.14.0.tgz", + "integrity": "sha512-x6OC9Q7HfYKqjnuNu5a7kffIYs3No30isapRBJl1iCHLitD8O0lFbRcVGiOcuyN837fqXzPZ1NS10maQzZMKqw==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "6.14.0", + "@typescript-eslint/utils": "6.14.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.14.0.tgz", + "integrity": "sha512-uty9H2K4Xs8E47z3SnXEPRNDfsis8JO27amp2GNCnzGETEW3yTqEIVg5+AI7U276oGF/tw6ZA+UesxeQ104ceA==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.14.0.tgz", + "integrity": "sha512-yPkaLwK0yH2mZKFE/bXkPAkkFgOv15GJAUzgUVonAbv0Hr4PK/N2yaA/4XQbTZQdygiDkpt5DkxPELqHguNvyw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.14.0.tgz", + "integrity": "sha512-XwRTnbvRr7Ey9a1NT6jqdKX8y/atWG+8fAIu3z73HSP8h06i3r/ClMhmaF/RGWGW1tHJEwij1uEg2GbEmPYvYg==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@types/json-schema": "^7.0.12", + "@types/semver": "^7.5.0", + "@typescript-eslint/scope-manager": "6.14.0", + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/typescript-estree": "6.14.0", + "semver": "^7.5.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": 
"sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.14.0.tgz", + "integrity": "sha512-fB5cw6GRhJUz03MrROVuj5Zm/Q+XWlVdIsFj+Zb1Hvqouc8t+XP2H5y53QYU/MGtd2dPg6/vJJlhoX3xc2ehfw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.14.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, + "node_modules/@vercel/ncc": { + "version": "0.38.1", + "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.1.tgz", + "integrity": "sha512-IBBb+iI2NLu4VQn3Vwldyi2QwaXt5+hTyh58ggAMoCGE6DJmPvwL3KPBWcJl1m9LYPChBLE980Jw+CS4Wokqxw==", + "dev": true, + "bin": { + "ncc": "dist/ncc/cli.js" + } + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/acorn": { + "version": "8.14.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", + "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.1.tgz", + "integrity": "sha512-TgUZgYvqZprrl7YldZNoa9OciCAyZR+Ejm9eXzKCmjsF5IKp/wgQ7Z/ZpjpGTIUPwrHQIcYeI8qDh4PsEwxMbw==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": 
"sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-includes": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz", + "integrity": "sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ==", + "dev": true, + "dependencies": { + 
"call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1", + "is-string": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/array.prototype.findlastindex": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", + "integrity": "sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0", + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz", + "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz", + "integrity": "sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", + "is-shared-array-buffer": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dev": true, + "dependencies": { + "retry": "0.13.1" + } + }, + 
"node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/axios": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz", + "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==", + "dependencies": { + "follow-redirects": "^1.15.0", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axios/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dev": true, + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dev": true, + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", 
+ "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", + "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", + "dev": true, + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dev": true, + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" + }, + "node_modules/bignumber.js": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz", + "integrity": "sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==", + "engines": { + "node": "*" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bottleneck": { + "version": "2.19.5", + "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" + }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.24.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.2.tgz", + "integrity": "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001669", + "electron-to-chromium": "^1.5.41", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.1" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/btoa-lite": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/btoa-lite/-/btoa-lite-1.0.0.tgz", + "integrity": "sha512-gvW7InbIyF8AicrqWoptdW08pUxuhq8BEgowNajy9RhiE86fmGAGl+bLKo6oB8QP0CkqHLowfN0oJdKC/J6LbA==" + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + 
"funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true + }, + "node_modules/call-bind": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001680", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001680.tgz", + "integrity": "sha512-rPQy70G6AGUMnbwS1z6Xg+RkHYPAi18ihs47GH0jcxIG7wArmPgY3XbS2sRdBbxJljp3thdT8BIqv9ccCypiPA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": 
true + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.1.tgz", + "integrity": "sha512-cuSVIHi9/9E/+821Qjdvngor+xpnlwnuwIyZOaLmHBVdXL+gP+I6QQB9VkO7RI77YIcTV+S1W9AreJ5eN63JBA==", + "dev": true + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-spinners": { + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", + "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-width": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/clone": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", + "integrity": 
"sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", + "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", + "dev": true + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/colors": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.2.3.tgz", + "integrity": "sha512-qTfM2pNFeMZcLvf/RbrVAzDEVttZjFhaApfx9dplNjvHSX88Ui66zBRb/4YGob/xUWxDceirgoC1lT676asfCQ==", + "dev": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/cookie": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", + "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/create-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", + "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "prompts": "^2.0.1" + }, + "bin": { + "create-jest": "bin/create-jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", 
+ "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/dateformat": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz", + "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dedent": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz", + "integrity": "sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==", + "dev": true, + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": 
"sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/defaults": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", + "integrity": "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==", + "dev": true, + "dependencies": { + "clone": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "engines": { + "node": ">=8" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": 
"sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dotenv": { + "version": "16.3.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz", + "integrity": "sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/motdotla/dotenv?sponsor=1" + } + }, + "node_modules/duplexify": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", + "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.63", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.63.tgz", + "integrity": "sha512-ddeXKuY9BHo/mw145axlyWjlJ1UBt4WK3AlvkT7W2AbqfRQoacVoRUCF6wL3uIx/8wT9oLKXzI+rFqHHscByaA==", + "dev": true + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-abstract": { + "version": "1.23.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.5.tgz", + "integrity": "sha512-vlmniQ0WNPwXqA0BnmwV3Ng7HxiGlh6r5U6JcTMNx8OilcAGqVJBHJcPjqOMaczU9fRuRK5Px2BdVyPRnKMMVQ==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + 
"available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", + "es-to-primitive": "^1.2.1", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", + "globalthis": "^1.0.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", + "has-symbols": "^1.0.3", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.3", + "is-string": "^1.0.7", + "is-typed-array": "^1.1.13", + "is-weakref": "^1.0.2", + "object-inspect": "^1.13.3", + "object-keys": "^1.1.1", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.3", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", + "unbox-primitive": "^1.0.2", + "which-typed-array": "^1.1.15" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-abstract/node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag/node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": 
"sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", + "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz", + "integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==", + "deprecated": "This version is no longer supported. 
Please see https://eslint.org/version-support for other options.", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.56.0", + "@humanwhocodes/config-array": "^0.11.13", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", + "dev": true, + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-import-resolver-node": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", + "dev": true, + "dependencies": { + "debug": "^3.2.7", + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" + } + }, + "node_modules/eslint-import-resolver-node/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-module-utils": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", + "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", + "dev": true, + "dependencies": { + "debug": "^3.2.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import": { + "version": "2.29.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", + "integrity": 
"sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", + "dev": true, + "dependencies": { + "array-includes": "^3.1.7", + "array.prototype.findlastindex": "^1.2.3", + "array.prototype.flat": "^1.3.2", + "array.prototype.flatmap": "^1.3.2", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.8.0", + "hasown": "^2.0.0", + "is-core-module": "^2.13.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.7", + "object.groupby": "^1.0.1", + "object.values": "^1.1.7", + "semver": "^6.3.1", + "tsconfig-paths": "^3.15.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-plugin-import/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/eslint-plugin-jest": { + "version": "27.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-27.6.0.tgz", + "integrity": "sha512-MTlusnnDMChbElsszJvrwD1dN3x6nZl//s4JD23BxB6MgR66TZlL064su24xEIS3VACfAoHV1vgyMgPw8nkdng==", + "dev": true, + "dependencies": { + "@typescript-eslint/utils": "^5.10.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@typescript-eslint/eslint-plugin": "^5.0.0 || ^6.0.0", + "eslint": "^7.0.0 || ^8.0.0", + "jest": "*" + }, + "peerDependenciesMeta": { + "@typescript-eslint/eslint-plugin": { + "optional": true + }, + "jest": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-jest/node_modules/@typescript-eslint/scope-manager": { + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.11.tgz", + "integrity": "sha512-dHFOsxoLFtrIcSj5h0QoBT/89hxQONwmn3FOQ0GOQcLOOXm+MIrS8zEAhs4tWl5MraxCY3ZJpaXQQdFMc2Tu+Q==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/visitor-keys": "5.59.11" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/eslint-plugin-jest/node_modules/@typescript-eslint/types": { + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.11.tgz", + "integrity": "sha512-epoN6R6tkvBYSc+cllrz+c2sOFWkbisJZWkOE+y3xHtvYaOE6Wk6B8e114McRJwFRjGvYdJwLXQH5c9osME/AA==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": 
"opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/eslint-plugin-jest/node_modules/@typescript-eslint/typescript-estree": { + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.11.tgz", + "integrity": "sha512-YupOpot5hJO0maupJXixi6l5ETdrITxeo5eBOeuV7RSKgYdU3G5cxO49/9WRnJq9EMrB7AuTSLH/bqOsXi7wPA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/visitor-keys": "5.59.11", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-jest/node_modules/@typescript-eslint/typescript-estree/node_modules/tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "dev": true, + "dependencies": { + "tslib": "^1.8.1" + }, + "engines": { + "node": ">= 6" + }, + "peerDependencies": { + "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + } + }, + "node_modules/eslint-plugin-jest/node_modules/@typescript-eslint/utils": { + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.11.tgz", + "integrity": "sha512-didu2rHSOMUdJThLk4aZ1Or8IcO3HzCw/ZvEjTTIfjIrcdd5cvSIwwDy2AOlE7htSNp7QIZ10fLMyRCveesMLg==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@types/json-schema": "^7.0.9", + "@types/semver": "^7.3.12", + "@typescript-eslint/scope-manager": "5.59.11", + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/typescript-estree": "5.59.11", + "eslint-scope": "^5.1.1", + "semver": "^7.3.7" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/eslint-plugin-jest/node_modules/@typescript-eslint/visitor-keys": { + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.11.tgz", + "integrity": "sha512-KGYniTGG3AMTuKF9QBD7EIrvufkB6O6uX3knP73xbKLMpH+QRPcgnCxjWXSHjMRuOxFLovljqQgQpR0c7GvjoA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.59.11", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/eslint-plugin-jest/node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/eslint-plugin-jest/node_modules/estraverse": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/eslint-plugin-jest/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-plugin-jest/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/eslint-plugin-sort-destructure-keys": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-sort-destructure-keys/-/eslint-plugin-sort-destructure-keys-1.5.0.tgz", + "integrity": "sha512-xGLyqHtbFXZNXQSvAiQ4ISBYokrbUywEhmaA50fKtSKgceCv5y3zjoNuZwcnajdM6q29Nxj+oXC9KcqfMsAPrg==", + "dev": true, + "dependencies": { + "natural-compare-lite": "^1.4.0" + }, + "engines": { + "node": ">=6.0.0" + }, + "peerDependencies": { + "eslint": "3 - 8" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/eventsourcemock": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eventsourcemock/-/eventsourcemock-2.0.0.tgz", + "integrity": "sha512-tSmJnuE+h6A8/hLRg0usf1yL+Q8w01RQtmg0Uzgoxk/HIPZrIUeAr/A4es/8h1wNsoG8RdiESNQLTKiNwbSC3Q==", + "dev": true + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "dev": true, + "dependencies": { + "@jest/expect-utils": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "node_modules/external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "dependencies": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fast-xml-parser": { + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz", + "integrity": "sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g==", + "funding": [ + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + }, + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": 
"sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/figures/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz", + "integrity": "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==", + "dev": true + }, + "node_modules/follow-redirects": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", + "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.3" + } + }, + "node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": 
"^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function.prototype.name": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", + "license": "MIT" + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gaxios": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", + "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gaxios/node_modules/agent-base": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/gaxios/node_modules/https-proxy-agent": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", + "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/gcp-metadata": { + 
"version": "6.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", + "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-symbol-description": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + 
"dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globalthis": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "dev": true, + "dependencies": { + "define-properties": "^1.2.1", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/google-auth-library": { + "version": "9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz", + "integrity": "sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/grpc-js": "^1.10.9", + "@grpc/proto-loader": "^0.7.13", + "@types/long": "^4.0.0", + "abort-controller": "^3.0.0", + "duplexify": "^4.0.0", + "google-auth-library": "^9.3.0", + "node-fetch": "^2.7.0", + "object-hash": "^3.0.0", + "proto3-json-serializer": "^2.0.2", + "protobufjs": "^7.3.2", + "retry-request": "^7.0.0", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, 
+ "node_modules/graphql": { + "version": "16.8.1", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", + "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" + } + }, + "node_modules/gtoken": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", + "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", + "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/headers-polyfill": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/headers-polyfill/-/headers-polyfill-4.0.2.tgz", + "integrity": "sha512-EWGTfnTqAO2L/j5HZgoM/3z82L7necsJ0pO9Tp0X1wil3PDLrkypTBRgVO2ExehEEvUycejZD3FuRaXpZZc3kw==", + "dev": true + }, + 
"node_modules/html-entities": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.5.2.tgz", + "integrity": "sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/mdevils" + }, + { + "type": "patreon", + "url": "https://patreon.com/mdevils" + } + ] + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, + "node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/ignore": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz", + "integrity": "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": 
"sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/inquirer": { + "version": "8.2.5", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.5.tgz", + "integrity": "sha512-QAgPDQMEgrDssk1XiwwHoOGYF9BAbUcc1+j+FhEvaOt8/cKRqyLn0U5qA6F74fGhTMGxf92pOvPBeh29jQJDTQ==", + "dev": true, + "dependencies": { + "ansi-escapes": "^4.2.1", + "chalk": "^4.1.1", + "cli-cursor": "^3.1.0", + "cli-width": "^3.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.21", + "mute-stream": "0.0.8", + "ora": "^5.4.1", + "run-async": "^2.4.0", + "rxjs": "^7.5.5", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", + "through": "^2.3.6", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/internal-slot": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.0", + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-array-buffer": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": 
"sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dev": true, + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "dev": true, + "dependencies": { + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": 
"sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-interactive": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", + "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-node-process": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-node-process/-/is-node-process-1.2.0.tgz", + "integrity": "sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw==", + "dev": true + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + 
"funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", + "dev": true, + "dependencies": { + "which-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, + "node_modules/isexe": { + "version": 
"2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-instrument/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", + "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "dev": true, + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/types": "^29.6.3", + "import-local": "^3.0.2", + "jest-cli": "^29.7.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", + "integrity": 
"sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "dev": true, + "dependencies": { + "execa": "^5.0.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-circus": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", + "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^1.0.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.7.0", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0", + "pretty-format": "^29.7.0", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-config": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", + "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-jest": "^29.7.0", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", + "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.6.3", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", + "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "dev": true, + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", + "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "jest-get-type": 
"^29.6.3", + "jest-util": "^29.7.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", + "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", + "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-leak-detector": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", + "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "dev": true, + "dependencies": { + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", + "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", + "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.6.3", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", + "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + 
"dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock-axios": { + "version": "4.7.3", + "resolved": "https://registry.npmjs.org/jest-mock-axios/-/jest-mock-axios-4.7.3.tgz", + "integrity": "sha512-RHHdCZWreeX1EAl77u46yqYJG5aKX9l4zsCwf6wsIb3uy3w/XaEC5n4wbyluNujXQSZfNH1ir8OXinoewYQkUw==", + "dev": true, + "dependencies": { + "@jest/globals": "^29.7.0", + "jest": "~29.7.0", + "synchronous-promise": "^2.0.17" + } + }, + "node_modules/jest-nock": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/jest-nock/-/jest-nock-0.2.2.tgz", + "integrity": "sha512-8L1Jf41PYF9JUiQZGe1kmNmQwLodi2W+am09lALOSiJ7+zLnQOQfPTGU1/DJ2lcLBg4Ok17CvQrGR7BTUe89xQ==", + "dev": true, + "dependencies": { + "eventsource": "^1.0.7", + "eventsourcemock": "^2.0.0", + "mkdirp": "^0.5.1", + "nock": "^13.0.4" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", + "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", + "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "dev": true, + "dependencies": { + "jest-regex-util": "^29.6.3", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", + "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "dev": true, + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/environment": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-leak-detector": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-resolve": "^29.7.0", 
+ "jest-runtime": "^29.7.0", + "jest-util": "^29.7.0", + "jest-watcher": "^29.7.0", + "jest-worker": "^29.7.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", + "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/globals": "^29.7.0", + "@jest/source-map": "^29.6.3", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", + "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "natural-compare": "^1.4.0", + "pretty-format": "^29.7.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", + "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "leven": "^3.1.0", + "pretty-format": "^29.7.0" + }, + "engines": { + 
"node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", + "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "dev": true, + "dependencies": { + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.7.0", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dev": true, + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jest/node_modules/jest-cli": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", + "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "dev": true, + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "create-jest": "^29.7.0", + "exit": "^0.1.2", + "import-local": "^3.0.2", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/js-levenshtein": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/js-levenshtein/-/js-levenshtein-1.1.6.tgz", + "integrity": "sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": 
"sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsbi": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/jsbi/-/jsbi-3.2.5.tgz", + "integrity": "sha512-aBE4n43IPvjaddScbvWRA2YlTzKEynHzu7MqOyTipdHucf/VxS63ViCjxYRg86M8Rxwbt/GfzHl1kKERkt45fQ==" + }, + "node_modules/jsesc": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", + "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonwebtoken": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz", + "integrity": "sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==", + "dependencies": { + "jws": "^3.2.2", + "lodash": "^4.17.21", + "ms": "^2.1.1", + "semver": "^7.3.8" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jsonwebtoken/node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": 
"sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jsonwebtoken/node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jsonwebtoken/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": 
"https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "license": "MIT" + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/long": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", + "integrity": "sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==", + 
"license": "Apache-2.0" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "dev": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mnemonist": { + "version": "0.38.3", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.3.tgz", + "integrity": "sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw==", + "dependencies": { + "obliterator": "^1.6.1" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/msw": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/msw/-/msw-2.0.11.tgz", + "integrity": "sha512-dAXFS2DxZX0uFqMPhS3oUAu8S/5IQ5qKKSwtXl3/dMTeML0C8JfSvbeWtowYg6pu4Iehgp5L/pHLrlIcG++y/A==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "@bundled-es-modules/cookie": "^2.0.0", + "@bundled-es-modules/js-levenshtein": "^2.0.1", + "@bundled-es-modules/statuses": "^1.0.1", + "@mswjs/cookies": "^1.1.0", + "@mswjs/interceptors": "^0.25.13", + "@open-draft/until": "^2.1.0", + "@types/cookie": "^0.4.1", + "@types/js-levenshtein": "^1.1.1", + "@types/statuses": "^2.0.1", + "chalk": "^4.1.2", + "chokidar": "^3.4.2", + "graphql": "^16.8.1", + "headers-polyfill": "^4.0.1", + "inquirer": "^8.2.0", + "is-node-process": "^1.2.0", + "js-levenshtein": "^1.1.6", + "outvariant": "^1.4.0", + "path-to-regexp": "^6.2.0", + "strict-event-emitter": "^0.5.0", + "type-fest": "^2.19.0", + "yargs": "^17.3.1" + }, + "bin": { + "msw": "cli/index.js" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mswjs" + }, + "peerDependencies": { + "typescript": ">= 4.7.x <= 5.2.x" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/msw/node_modules/type-fest": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", + "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mute-stream": { + "version": "0.0.8", + "resolved": 
"https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/natural-compare-lite": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", + "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", + "dev": true + }, + "node_modules/ncc": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/ncc/-/ncc-0.3.6.tgz", + "integrity": "sha512-OXudTB2Ebt/FnOuDoPQbaa17+tdVqSOWA+gLfPxccWwsNED1uA2zEhpoB1hwdFC9yYbio/mdV5cvOtQI3Zrx1w==", + "dev": true, + "dependencies": { + "mkdirp": "^0.5.1", + "rimraf": "^2.6.1", + "tracer": "^0.8.7", + "ws": "^2.3.1" + } + }, + "node_modules/ncc/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/nock": { + "version": "13.4.0", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.4.0.tgz", + "integrity": "sha512-W8NVHjO/LCTNA64yxAPHV/K47LpGYcVzgKd3Q0n6owhwvD0Dgoterc25R4rnZbckJEb6Loxz1f5QMuJpJnbSyQ==", + "dev": true, + "dependencies": { + "debug": "^4.1.0", + "json-stringify-safe": "^5.0.1", + "propagate": "^2.0.0" + }, + "engines": { + "node": ">= 10.13" + } + }, + "node_modules/node-abort-controller": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz", + "integrity": "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==" + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true + }, + "node_modules/node-releases": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", + "dev": true + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/object-inspect": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", + "integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz", + "integrity": "sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.groupby": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz", + "integrity": "sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1" + } + }, + "node_modules/object.values": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz", + "integrity": "sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/obliterator": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-1.6.1.tgz", + "integrity": "sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig==" + }, + "node_modules/octokit": { + 
"version": "3.1.2", + "resolved": "https://registry.npmjs.org/octokit/-/octokit-3.1.2.tgz", + "integrity": "sha512-MG5qmrTL5y8KYwFgE1A4JWmgfQBaIETE/lOlfwNYx1QOtCQHGVxkRJmdUJltFc1HVn73d61TlMhMyNTOtMl+ng==", + "dependencies": { + "@octokit/app": "^14.0.2", + "@octokit/core": "^5.0.0", + "@octokit/oauth-app": "^6.0.0", + "@octokit/plugin-paginate-graphql": "^4.0.0", + "@octokit/plugin-paginate-rest": "^9.0.0", + "@octokit/plugin-rest-endpoint-methods": "^10.0.0", + "@octokit/plugin-retry": "^6.0.0", + "@octokit/plugin-throttling": "^8.0.0", + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "dev": true, + "dependencies": { + "@aashutoshrathi/word-wrap": "^1.2.3", + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ora": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", + "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", + "dev": true, + "dependencies": { + "bl": "^4.1.0", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-spinners": "^2.5.0", + "is-interactive": "^1.0.0", + "is-unicode-supported": "^0.1.0", + "log-symbols": "^4.1.0", + "strip-ansi": "^6.0.0", + "wcwidth": "^1.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/outvariant": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/outvariant/-/outvariant-1.4.3.tgz", + "integrity": "sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==", + "dev": true + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/path-to-regexp": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.1.tgz", + "integrity": "sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw==", + "dev": true + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": 
"sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": 
"sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.1.1.tgz", + "integrity": "sha512-22UbSzg8luF4UuZtzgiUOfcGM8s4tjBv6dJRT7j275NXsy2jb4aJa4NNveul5x4eqlF1wuhuR2RElK71RvmVaw==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/priorityqueuejs": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/priorityqueuejs/-/priorityqueuejs-1.0.0.tgz", + "integrity": "sha512-lg++21mreCEOuGWTbO5DnJKAdxfjrdN0S9ysoW9SzdSJvbkWpkaDdpG/cdsPCsEnoLUwmd9m3WcZhngW7yKA2g==" + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/propagate": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", + "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/proto3-json-serializer": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz", + "integrity": "sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==", + "license": "Apache-2.0", + "dependencies": { + "protobufjs": "^7.2.5" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/protobufjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + 
"node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ] + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.3.tgz", + "integrity": "sha512-vqlC04+RQoFalODCbCumG2xIOvapzVMHwsyIGM/SIE8fRhFFsXeH8/QQ+s0T0kDAhKc4k30s73/0ydkHQz6HlQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": 
"sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-cwd/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.2.tgz", + "integrity": "sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/retry-request": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", + "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", + "dependencies": { + "@types/request": "^2.48.8", + "extend": "^3.0.2", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-async": { 
+ "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safe-array-concat": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", + "has-symbols": "^1.0.3", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safe-regex-test": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-regex": "^1.1.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "node_modules/sax": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz", + "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==" + }, + "node_modules/semaphore": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/semaphore/-/semaphore-1.1.0.tgz", + "integrity": "sha512-O4OZEaNtkMd/K0i6js9SL+gqy0ZCBMgUvlSqHKi4IBdjhe7wB8pwztUk1BbZ1fmrvpwFrPbHzqd2w5pTcJH6LA==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.13", + "resolved": 
"https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/stoppable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz", + "integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==", + "engines": { + "node": ">=4", + "npm": ">=6" + } + }, + "node_modules/stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "dependencies": { + "stubs": "^3.0.0" + } + }, + "node_modules/stream-shift": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==" + }, + "node_modules/strict-event-emitter": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/strict-event-emitter/-/strict-event-emitter-0.5.1.tgz", + "integrity": "sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==", + "dev": true + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string.prototype.trim": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strnum": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" + }, + "node_modules/stubs": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/synchronous-promise": { + "version": "2.0.17", + "resolved": "https://registry.npmjs.org/synchronous-promise/-/synchronous-promise-2.0.17.tgz", + "integrity": "sha512-AsS729u2RHUfEra9xJrE39peJcc2stq2+poBXX8bcM08Y6g9j/i/PUzwNQqkaJde7Ntg1TO7bSREbR5sdosQ+g==", + "dev": true + }, + "node_modules/teeny-request": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", + "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.9", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true + }, + "node_modules/tinytim": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/tinytim/-/tinytim-0.1.1.tgz", + "integrity": "sha512-NIpsp9lBIxPNzB++HnMmUd4byzJSVbbO4F+As1Gb1IG/YQT5QvmBDjpx8SpDS8fhGC+t+Qw8ldQgbcAIaU+2cA==", + "dev": true, + "engines": { + "node": ">= 0.2.0" + } + }, + "node_modules/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true + }, + "node_modules/to-regex-range": { 
+ "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "node_modules/tracer": { + "version": "0.8.15", + "resolved": "https://registry.npmjs.org/tracer/-/tracer-0.8.15.tgz", + "integrity": "sha512-ZQzlhd6zZFIpAhACiZkxLjl65XqVwi8t8UEBVGRIHAQN6nj55ftJWiFell+WSqWCP/vEycrIbUSuiyMwul+TFw==", + "dev": true, + "dependencies": { + "colors": "1.2.3", + "dateformat": "3.0.3", + "mkdirp": "^0.5.1", + "tinytim": "0.1.1" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/ts-api-utils": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.0.3.tgz", + "integrity": "sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg==", + "dev": true, + "engines": { + "node": ">=16.13.0" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/ts-jest": { + "version": "29.1.1", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.1.tgz", + "integrity": "sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA==", + "dev": true, + "dependencies": { + "bs-logger": "0.x", + "fast-json-stable-stringify": "2.x", + "jest-util": "^29.0.0", + "json5": "^2.2.3", + "lodash.memoize": "4.x", + "make-error": "1.x", + "semver": "^7.5.3", + "yargs-parser": "^21.0.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/types": "^29.0.0", + "babel-jest": "^29.0.0", + "jest": "^29.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, 
+ "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/tsconfig-paths": { + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", + "dev": true, + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "node_modules/tsconfig-paths/node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/tsconfig-paths/node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": 
"sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typescript": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", + "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/ultron": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.1.1.tgz", + "integrity": "sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og==", + "dev": true + }, + "node_modules/unbox-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/undici": { + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, + "node_modules/universal-github-app-jwt": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/universal-github-app-jwt/-/universal-github-app-jwt-1.2.0.tgz", + "integrity": 
"sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g==", + "dependencies": { + "@types/jsonwebtoken": "^9.0.0", + "jsonwebtoken": "^9.0.2" + } + }, + "node_modules/universal-github-app-jwt/node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/universal-github-app-jwt/node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/universal-github-app-jwt/node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/universal-github-app-jwt/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/universal-user-agent": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", + "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" + }, + "node_modules/update-browserslist-db": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.0" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/update-browserslist-db/node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + 
"dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/wcwidth": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", + "integrity": "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==", + "dev": true, + "dependencies": { + "defaults": "^1.0.3" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/ws": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-2.3.1.tgz", + "integrity": "sha512-61a+9LgtYZxTq1hAonhX8Xwpo2riK4IOR/BIVxioFbCfc3QFKmpE4x9dLExfLHKtUfVZigYa36tThVhO57erEw==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.0.1", + "ultron": "~1.1.0" + } + }, + "node_modules/ws/node_modules/safe-buffer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.0.1.tgz", + "integrity": "sha512-cr7dZWLwOeaFBLTIuZeYdkfO7UzGIKhjYENJFAxUOMKWGaWDm2nJM2rzxNRm5Owu0DH3ApwNo6kx5idXZfb/Iw==", + "dev": true + }, + "node_modules/xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": 
"https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/package.json b/package.json index a114162..16e090a 100644 --- a/package.json +++ b/package.json @@ -38,16 +38,18 @@ "@azure/identity": "^4.0.0", "@azure/ms-rest-js": "^2.6.0", "@azure/storage-blob": "^12.17.0", + "@google-cloud/firestore": "^7.10.0", "axios": "^1.6.2", "octokit": "^3.1.2", "uuid": "^9.0.1" }, "devDependencies": { - "@types/jest": "^29.5.11", + "@google-cloud/storage": "^7.14.0", + "@types/jest": "^29.5.14", "@types/node": "^18.19.3", "@types/uuid": "^9.0.7", - "@typescript-eslint/eslint-plugin": "6.14.0", - "@typescript-eslint/parser": "6.14.0", + "@typescript-eslint/eslint-plugin": "6.15.0", + "@typescript-eslint/parser": "6.15.0", "@vercel/ncc": "0.38.1", "dotenv": "^16.3.1", "eslint": "^8.56.0", @@ -55,14 +57,14 @@ "eslint-plugin-import": "^2.29.1", "eslint-plugin-jest": "27.6.0", "eslint-plugin-sort-destructure-keys": "^1.4.0", - "jest": "29.7.0", + "jest": "^29.7.0", "jest-mock-axios": "^4.7.3", "jest-nock": "^0.2.2", "msw": "^2.0.11", "ncc": "^0.3.6", "nock": "^13.4.0", "prettier": "3.1.1", - "ts-jest": "29.1.1", + "ts-jest": "^29.1.1", "ts-node": "^10.9.2", "typescript": "5.3.3" }, diff --git a/src/actions/shared.ts b/src/actions/shared.ts index d095dd2..5613cb7 100644 --- a/src/actions/shared.ts +++ b/src/actions/shared.ts @@ -1,4 +1,5 @@ import * as core from "@actions/core"; +import { Storage } from "@google-cloud/storage"; import { getBlobServiceClient, getCosmosDBContainer, @@ -14,17 +15,21 @@ import { import { s3Client } from "@lib/s3/s3Client"; import { AzureBlobPlanRepo } from "@modules/terraformPlan/repo/AzureBlobPlanRepo"; import { CosmosDBMetadataRepo } from "@modules/terraformPlan/repo/CosmosDbMetadataRepo"; +import { FirestoreDBMetadataRepo } from "@modules/terraformPlan/repo/FirestoreDbMetadataRepo"; +import { GcsPlanRepo } from "@modules/terraformPlan/repo/GcsPlanRepo"; export const getMetadataRepo = (): IMetadataRepository => { const tableName = core.getInput("tableName"); - const metaDataRepoType = core.getInput("metadataRepositoryType"); + + core.debug(`tableName: ${tableName}`); core.debug(`metadataRepositoryType: ${metaDataRepoType}`); + core.debug(`metadataRepositoryType type: ${typeof 
metaDataRepoType}`); + core.debug(`strict equality test: ${"firestore" === metaDataRepoType}`); switch (metaDataRepoType) { case "dynamo": return new DynamoDBMetadataRepo(dynamoClient, tableName); - break; case "cosmos": { const cosmosConnectionString = core.getInput("cosmosConnectionString"); const cosmosContainerName = core.getInput("cosmosContainerName"); @@ -48,7 +53,31 @@ export const getMetadataRepo = (): IMetadataRepository => { ); return new CosmosDBMetadataRepo(container); - break; + } + case "firestore": { + const gcpProjectId = core.getInput("gcpProjectId"); + const gcpFirestoreDatabaseName = core.getInput("gcpFirestoreDatabaseName"); + const gcpFirestoreCollectionName = core.getInput("gcpFirestoreCollectionName"); + + core.debug(`gcpProjectId: ${gcpProjectId}`); + core.debug(`gcpFirestoreDatabaseName: ${gcpFirestoreDatabaseName}`); + core.debug(`gcpFirestoreCollectionName: ${gcpFirestoreCollectionName}`); + + if (!gcpProjectId) { + throw new Error("gcpProjectId is required"); + } + if (!gcpFirestoreDatabaseName) { + throw new Error("gcpFirestoreDatabaseName is required"); + } + if (!gcpFirestoreCollectionName) { + throw new Error("gcpFirestoreCollectionName is required"); + } + + return new FirestoreDBMetadataRepo( + gcpProjectId, + gcpFirestoreCollectionName, + gcpFirestoreDatabaseName + ); } default: throw new Error(`Invalid metadata repository type: ${metaDataRepoType}`); @@ -65,7 +94,6 @@ export const getPlanRepo = (): IPlanRepository => { switch (planRepoType.toLowerCase()) { case "s3": return new S3PlanRepo(s3Client, bucketName); - break; case "azureblob": { const accountName = core.getInput("blobAccountName"); const blobContainerName = core.getInput("blobContainerName"); @@ -89,7 +117,21 @@ export const getPlanRepo = (): IPlanRepository => { : getBlobServiceClient(blobConnectionString); return new AzureBlobPlanRepo(client, blobContainerName); - break; + } + case "gcs": { + const gcpProjectId = core.getInput("gcpProjectId"); + + core.debug(`gcpProjectId: ${gcpProjectId}`); + + if (!gcpProjectId) { + throw new Error("gcpProjectId is required"); + } + + const client = new Storage({ + projectId: gcpProjectId, + }); + + return new GcsPlanRepo(client, bucketName); } default: throw new Error(`Invalid plan repository type: ${planRepoType}`); diff --git a/src/modules/terraformPlan/mapper/firestoreMapper.ts b/src/modules/terraformPlan/mapper/firestoreMapper.ts new file mode 100644 index 0000000..8d0ba88 --- /dev/null +++ b/src/modules/terraformPlan/mapper/firestoreMapper.ts @@ -0,0 +1,65 @@ +import { UniqueEntityId } from "../../../lib"; +import { Mapper } from "../../../lib/repository/mapper"; +import { TerraformPlanCommit, TerraformPlanComponent } from "../domain"; +import { TerraformPlan } from "../domain/TerraformPlan"; +import { TerraformPlanBranch } from "../domain/TerraformPlanBranch"; +import { TerraformPlanPR } from "../domain/TerraformPlanPR"; +import { TerraformPlanRepository } from "../domain/TerraformPlanRepository"; +import { TerraformPlanStack } from "../domain/TerraformPlanStack"; + +export class TerraformPlanFirestoreMapper extends Mapper { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + public toDomain(raw: any): TerraformPlan { + const planOrError = TerraformPlan.create( + { + branch: TerraformPlanBranch.create({ value: raw.branch }).getValue(), + commitSHA: TerraformPlanCommit.create({ + value: raw.commitSHA + }).getValue(), + component: TerraformPlanComponent.create({ + value: raw.component + }).getValue(), + pr: TerraformPlanPR.create({ + 
value: raw.pr + }).getValue(), + stack: TerraformPlanStack.create({ + value: raw.stack + }).getValue(), + repository: TerraformPlanRepository.create({ + repoOwner: raw.repoOwner, + repoName: raw.repoName + }).getValue(), + tainted: raw.tainted, + createdAt: new Date(raw.createdAt), + contents: Buffer.from(""), + contentsHash: raw.contentsHash + }, + new UniqueEntityId(raw.id) + ); + + if (planOrError.isFailure) { + throw new Error("Error converting Firestore item to domain"); + } + + return planOrError.getValue(); + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + public toPersistence(domain: TerraformPlan): any { + const item = { + id: domain.id.toString(), + branch: domain.branch, + commitSHA: domain.commitSHA, + component: domain.component, + contentsHash: domain.contentsHash || "", + repoOwner: domain.repoOwner, + pr: domain.pr, + repoName: domain.repoName, + stack: domain.stack, + tainted: domain.tainted, + createdAt: domain.createdAt + }; + + return item; + } +} diff --git a/src/modules/terraformPlan/mapper/index.ts b/src/modules/terraformPlan/mapper/index.ts index a678c2d..293eadd 100644 --- a/src/modules/terraformPlan/mapper/index.ts +++ b/src/modules/terraformPlan/mapper/index.ts @@ -1,2 +1,3 @@ export * from "./dynamoMapper"; export * from "./cosmosMapper"; +export * from "./firestoreMapper"; diff --git a/src/modules/terraformPlan/repo/FirestoreDbMetadataRepo.ts b/src/modules/terraformPlan/repo/FirestoreDbMetadataRepo.ts new file mode 100644 index 0000000..32c116c --- /dev/null +++ b/src/modules/terraformPlan/repo/FirestoreDbMetadataRepo.ts @@ -0,0 +1,216 @@ +import { Firestore, CollectionReference, Timestamp } from "@google-cloud/firestore"; +import { IMetadataRepository, RepositoryErrors } from "@lib/repository"; +import { + TerraformPlan, + TerraformPlanFirestoreMapper +} from "@modules/terraformPlan"; + +export class FirestoreDBMetadataRepo implements IMetadataRepository { + private mapper = new TerraformPlanFirestoreMapper(); + private collection: CollectionReference; + private firestore: Firestore; + + constructor( + projectId: string, + collectionName: string, + databaseName: string, + ) { + // Initialize Firestore + this.firestore = new Firestore({ + projectId: projectId, + databaseId: databaseName, // Specify your database ID + ignoreUndefinedProperties: true + }); + + // Initialize collection (use simple path) + this.collection = this.firestore.collection(collectionName); + + // Replace createCompositeIndexes with checkIndexes + this.checkIndexes(collectionName); + + console.log('Initializing Firestore with:', { + projectId, + collectionName, + collectionPath: this.collection.path, + databaseId: databaseName + }); + } + + public async loadByCommit( + owner: string, + repo: string, + component: string, + stack: string, + commitSHA: string + ): Promise { + const snapshot = await this.collection + .where("repoOwner", "==", owner) + .where("repoName", "==", repo) + .where("commitSHA", "==", commitSHA) + .where("component", "==", component) + .where("stack", "==", stack) + .orderBy("createdAt", "desc") + .limit(1) + .get(); + + if (snapshot.empty) { + throw new RepositoryErrors.PlanNotFoundError(component, stack, commitSHA); + } + + return this.mapper.toDomain(snapshot.docs[0].data()); + } + + public async loadLatestForPR( + owner: string, + repo: string, + component: string, + stack: string, + pr: number + ): Promise { + const snapshot = await this.collection + .where("repoOwner", "==", owner) + .where("repoName", "==", repo) + .where("pr", "==", pr) + 
.where("component", "==", component) + .where("stack", "==", stack) + .orderBy("createdAt", "desc") + .limit(1) + .get(); + + if (snapshot.empty) { + throw new RepositoryErrors.PlanNotFoundError( + component, + stack, + undefined, + pr + ); + } + + return this.mapper.toDomain(snapshot.docs[0].data()); + } + + public async save(plan: TerraformPlan): Promise { + try { + const item = this.mapper.toPersistence(plan); + + // Create a new document with auto-generated ID + const docRef = this.collection.doc(); + console.log('Attempting to save document with path:', docRef.path); + + await docRef.set({ + ...item, + _id: docRef.id, + _createdAt: Timestamp.now(), + ttl: Timestamp.fromDate(new Date(Date.now() + (30 * 24 * 60 * 60 * 1000))) + }); + + console.log('Successfully saved document with ID:', docRef.id); + + } catch (error: any) { + console.error('Error details:', { + code: error.code, + message: error.message, + details: error.details, + stack: error.stack, + collectionPath: this.collection.path + }); + + throw error; + } + } + + private async checkIndexes(collectionName: string) { + try { + // Test queries that require indexes + const testQueries = [ + this.collection + .where("repoOwner", "==", "test") + .where("repoName", "==", "test") + .where("commitSHA", "==", "test") + .where("component", "==", "test") + .where("stack", "==", "test") + .orderBy("createdAt", "desc") + .limit(1), + + this.collection + .where("repoOwner", "==", "test") + .where("repoName", "==", "test") + .where("pr", "==", 1) + .where("component", "==", "test") + .where("stack", "==", "test") + .orderBy("createdAt", "desc") + .limit(1) + ]; + + await Promise.all(testQueries.map(q => q.get())); + console.log('All required indexes are available'); + } catch (error: any) { + // Check both error codes that Firestore might return + if (error.code === 9 || error.code === 'failed-precondition') { + const indexUrl = error.details?.match(/https:\/\/console\.firebase\.google\.com[^\s]*/)?.[0] || ''; + console.warn(` +Missing required indexes. Please create them using one of these methods: + +1. Using Firebase Console (click this link): +${indexUrl} + +2. Or using CLI with firestore.indexes.json: +{ + "indexes": [ + { + "collectionGroup": "${collectionName}", + "queryScope": "COLLECTION", + "fields": [ + { "fieldPath": "repoOwner", "order": "ASCENDING" }, + { "fieldPath": "repoName", "order": "ASCENDING" }, + { "fieldPath": "commitSHA", "order": "ASCENDING" }, + { "fieldPath": "component", "order": "ASCENDING" }, + { "fieldPath": "stack", "order": "ASCENDING" }, + { "fieldPath": "createdAt", "order": "DESCENDING" } + ] + }, + { + "collectionGroup": "${collectionName}", + "queryScope": "COLLECTION", + "fields": [ + { "fieldPath": "repoOwner", "order": "ASCENDING" }, + { "fieldPath": "repoName", "order": "ASCENDING" }, + { "fieldPath": "pr", "order": "ASCENDING" }, + { "fieldPath": "component", "order": "ASCENDING" }, + { "fieldPath": "stack", "order": "ASCENDING" }, + { "fieldPath": "createdAt", "order": "DESCENDING" } + ] + } + ], + "fieldOverrides": [ + { + "collectionGroup": "${collectionName}", + "fieldPath": "ttl", + "ttl": true, + "indexes": [ + { + "order": "ASCENDING", + "queryScope": "COLLECTION" + }, + { + "order": "DESCENDING", + "queryScope": "COLLECTION" + }, + { + "arrayConfig": "CONTAINS", + "queryScope": "COLLECTION" + } + ] + } + ] +} + +3. 
Deploy using: firebase deploy --only firestore:indexes +`); + // Don't throw the error, just warn + return; + } + console.error('Unexpected error checking indexes:', error); + } + } +} diff --git a/src/modules/terraformPlan/repo/GcsPlanRepo.ts b/src/modules/terraformPlan/repo/GcsPlanRepo.ts new file mode 100644 index 0000000..434f8a8 --- /dev/null +++ b/src/modules/terraformPlan/repo/GcsPlanRepo.ts @@ -0,0 +1,52 @@ +import { Storage } from "@google-cloud/storage"; +import { IPlanRepository, RepositoryErrors } from "@lib/repository"; +import { TerraformPlan } from "@modules/terraformPlan"; + +const getKey = ( + repoOwner: string, + repoName: string, + commitSHA: string, + component: string, + stack: string +) => `${repoOwner}/${repoName}/${commitSHA}/${component}/${stack}.tfplan`; + +export class GcsPlanRepo implements IPlanRepository { + constructor( + private storage: Storage, + private bucketName: string + ) {} + + public async load( + repoOwner: string, + repoName: string, + component: string, + stack: string, + commitSHA: string + ): Promise { + const blobName = getKey(repoOwner, repoName, commitSHA, component, stack); + const bucket = this.storage.bucket(this.bucketName); + const file = bucket.file(blobName); + + try { + const [contents] = await file.download(); + + if (!contents || contents.length < 1) { + throw new RepositoryErrors.PlanNotFoundError(commitSHA, component, stack); + } + + return new Uint8Array(contents); + } catch (error) { + throw new RepositoryErrors.PlanNotFoundError(commitSHA, component, stack); + } + } + + public async save(plan: TerraformPlan): Promise { + const { repoOwner, repoName, commitSHA, component, contents, stack } = plan; + const blobName = getKey(repoOwner, repoName, commitSHA, component, stack); + + const bucket = this.storage.bucket(this.bucketName); + const file = bucket.file(blobName); + + await file.save(contents); + } +} diff --git a/yarn.lock b/yarn.lock index f824209..6188fd1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4,12 +4,12 @@ "@aashutoshrathi/word-wrap@^1.2.3": version "1.2.6" - resolved "https://registry.yarnpkg.com/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf" + resolved "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz" integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA== "@actions/cache@^3.2.2": version "3.2.2" - resolved "https://registry.yarnpkg.com/@actions/cache/-/cache-3.2.2.tgz#e7bbb5f9b67c613f96f98f91506c69424c9aa288" + resolved "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz" integrity sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg== dependencies: "@actions/core" "^1.10.0" @@ -23,32 +23,24 @@ semver "^6.1.0" uuid "^3.3.3" -"@actions/core@^1.10.0", "@actions/core@^1.2.6": - version "1.10.0" - resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.10.0.tgz#44551c3c71163949a2f06e94d9ca2157a0cfac4f" - integrity sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug== +"@actions/core@^1.10.0", "@actions/core@^1.10.1", "@actions/core@^1.2.6": + version "1.11.1" + resolved "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz" + integrity sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A== dependencies: + "@actions/exec" "^1.1.1" "@actions/http-client" "^2.0.1" - uuid "^8.3.2" - -"@actions/core@^1.10.1": - version "1.10.1" - resolved 
"https://registry.yarnpkg.com/@actions/core/-/core-1.10.1.tgz#61108e7ac40acae95ee36da074fa5850ca4ced8a" - integrity sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g== - dependencies: - "@actions/http-client" "^2.0.1" - uuid "^8.3.2" -"@actions/exec@^1.0.0", "@actions/exec@^1.0.1": +"@actions/exec@^1.0.0", "@actions/exec@^1.0.1", "@actions/exec@^1.1.1": version "1.1.1" - resolved "https://registry.yarnpkg.com/@actions/exec/-/exec-1.1.1.tgz#2e43f28c54022537172819a7cf886c844221a611" + resolved "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz" integrity sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w== dependencies: "@actions/io" "^1.0.1" "@actions/github@^6.0.0": version "6.0.0" - resolved "https://registry.yarnpkg.com/@actions/github/-/github-6.0.0.tgz#65883433f9d81521b782a64cc1fd45eef2191ea7" + resolved "https://registry.npmjs.org/@actions/github/-/github-6.0.0.tgz" integrity sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g== dependencies: "@actions/http-client" "^2.2.0" @@ -58,22 +50,15 @@ "@actions/glob@^0.1.0": version "0.1.2" - resolved "https://registry.yarnpkg.com/@actions/glob/-/glob-0.1.2.tgz#9685ed2d6583093479c8f137d067c4329d7d0974" + resolved "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz" integrity sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A== dependencies: "@actions/core" "^1.2.6" minimatch "^3.0.4" -"@actions/http-client@^2.0.1": - version "2.1.0" - resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.1.0.tgz#b6d8c3934727d6a50d10d19f00a711a964599a9f" - integrity sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw== - dependencies: - tunnel "^0.0.6" - -"@actions/http-client@^2.1.1", "@actions/http-client@^2.2.0": +"@actions/http-client@^2.0.1", "@actions/http-client@^2.1.1", "@actions/http-client@^2.2.0": version "2.2.0" - resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.2.0.tgz#f8239f375be6185fcd07765efdcf0031ad5df1a0" + resolved "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.0.tgz" integrity sha512-q+epW0trjVUUHboliPb4UF9g2msf+w61b32tAkFEwL/IwP0DQWgbCMM0Hbe3e3WXSKz5VcUXbzJQgy8Hkra/Lg== dependencies: tunnel "^0.0.6" @@ -81,12 +66,12 @@ "@actions/io@^1.0.1", "@actions/io@^1.1.1", "@actions/io@^1.1.2": version "1.1.3" - resolved "https://registry.yarnpkg.com/@actions/io/-/io-1.1.3.tgz#4cdb6254da7962b07473ff5c335f3da485d94d71" + resolved "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz" integrity sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q== "@actions/tool-cache@^2.0.1": version "2.0.1" - resolved "https://registry.yarnpkg.com/@actions/tool-cache/-/tool-cache-2.0.1.tgz#8a649b9c07838d9d750c9864814e66a7660ab720" + resolved "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-2.0.1.tgz" integrity sha512-iPU+mNwrbA8jodY8eyo/0S/QqCKDajiR8OxWTnSk/SnYg0sj8Hp4QcUEVC1YFpHWXtrfbQrE13Jz4k4HXJQKcA== dependencies: "@actions/core" "^1.2.6" @@ -97,16 +82,16 @@ uuid "^3.3.2" "@ampproject/remapping@^2.2.0": - version "2.2.1" - resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" - integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== + version "2.3.0" + resolved 
"https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz" + integrity sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw== dependencies: - "@jridgewell/gen-mapping" "^0.3.0" - "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.24" "@aws-crypto/crc32@3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-3.0.0.tgz#07300eca214409c33e3ff769cd5697b57fdd38fa" + resolved "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-3.0.0.tgz" integrity sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA== dependencies: "@aws-crypto/util" "^3.0.0" @@ -115,7 +100,7 @@ "@aws-crypto/crc32c@3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/crc32c/-/crc32c-3.0.0.tgz#016c92da559ef638a84a245eecb75c3e97cb664f" + resolved "https://registry.npmjs.org/@aws-crypto/crc32c/-/crc32c-3.0.0.tgz" integrity sha512-ENNPPManmnVJ4BTXlOjAgD7URidbAznURqD0KvfREyc4o20DPYdEldU1f5cQ7Jbj0CJJSPaMIk/9ZshdB3210w== dependencies: "@aws-crypto/util" "^3.0.0" @@ -124,14 +109,14 @@ "@aws-crypto/ie11-detection@^3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz#640ae66b4ec3395cee6a8e94ebcd9f80c24cd688" + resolved "https://registry.npmjs.org/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz" integrity sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q== dependencies: tslib "^1.11.1" "@aws-crypto/sha1-browser@3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/sha1-browser/-/sha1-browser-3.0.0.tgz#f9083c00782b24714f528b1a1fef2174002266a3" + resolved "https://registry.npmjs.org/@aws-crypto/sha1-browser/-/sha1-browser-3.0.0.tgz" integrity sha512-NJth5c997GLHs6nOYTzFKTbYdMNA6/1XlKVgnZoaZcQ7z7UJlOgj2JdbHE8tiYLS3fzXNCguct77SPGat2raSw== dependencies: "@aws-crypto/ie11-detection" "^3.0.0" @@ -144,7 +129,7 @@ "@aws-crypto/sha256-browser@3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz#05f160138ab893f1c6ba5be57cfd108f05827766" + resolved "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz" integrity sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ== dependencies: "@aws-crypto/ie11-detection" "^3.0.0" @@ -158,7 +143,7 @@ "@aws-crypto/sha256-js@3.0.0", "@aws-crypto/sha256-js@^3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz#f06b84d550d25521e60d2a0e2a90139341e007c2" + resolved "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz" integrity sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ== dependencies: "@aws-crypto/util" "^3.0.0" @@ -167,14 +152,14 @@ "@aws-crypto/supports-web-crypto@^3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz#5d1bf825afa8072af2717c3e455f35cda0103ec2" + resolved "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz" integrity sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg== dependencies: tslib "^1.11.1" "@aws-crypto/util@^3.0.0": version "3.0.0" - resolved 
"https://registry.yarnpkg.com/@aws-crypto/util/-/util-3.0.0.tgz#1c7ca90c29293f0883468ad48117937f0fe5bfb0" + resolved "https://registry.npmjs.org/@aws-crypto/util/-/util-3.0.0.tgz" integrity sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w== dependencies: "@aws-sdk/types" "^3.222.0" @@ -183,7 +168,7 @@ "@aws-sdk/client-dynamodb@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-dynamodb/-/client-dynamodb-3.474.0.tgz#fd6326b18def340d1acd31c16ef8810e9e5a8d40" + resolved "https://registry.npmjs.org/@aws-sdk/client-dynamodb/-/client-dynamodb-3.474.0.tgz" integrity sha512-lEUmxBdJ6f2uwbUDyojvF0aXXBzhLJcM6h6t9zgkyA6+N4CueTrtqpgXignideR5+AEgRAQD3JO2MkYRvQuBYQ== dependencies: "@aws-crypto/sha256-browser" "3.0.0" @@ -231,7 +216,7 @@ "@aws-sdk/client-s3@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-s3/-/client-s3-3.474.0.tgz#55a7430d80082dfdc14f08ed86f140a0e9ebce94" + resolved "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.474.0.tgz" integrity sha512-uqji9u2yIhFMx6E18+iIlKqimZE1SUEewS78iYYzOKRoQQ+XqFnQXtHTvBGfTExEvdwZUXYg8FqSP2UpQiEf/g== dependencies: "@aws-crypto/sha1-browser" "3.0.0" @@ -294,7 +279,7 @@ "@aws-sdk/client-sso@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.474.0.tgz#eaea452b76df2d8724e76df1bed8162f182405f6" + resolved "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.474.0.tgz" integrity sha512-6toUmQUIHkDM/P2/nyLEO/mcWOIPByTlegqX9VCHhYh9Fs5MDT2nit7I6fZzBjZjB5oVTwKjbzgxae9cE3bhqw== dependencies: "@aws-crypto/sha256-browser" "3.0.0" @@ -336,7 +321,7 @@ "@aws-sdk/client-sts@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.474.0.tgz#65b4f4132e9891daf7987f5e4fb5f6998b040343" + resolved "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.474.0.tgz" integrity sha512-qPPMbrDVAUJgYiFWVewFG7dg0VyMfuGNNK4IC1nZr0eXejUTbdm8cio6IZ8OkWtK+A+L+wx1vX5686WYVgQ0dQ== dependencies: "@aws-crypto/sha256-browser" "3.0.0" @@ -382,7 +367,7 @@ "@aws-sdk/core@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/core/-/core-3.474.0.tgz#2f2d06815cc56f09e516aefc2873ea851e4aaa81" + resolved "https://registry.npmjs.org/@aws-sdk/core/-/core-3.474.0.tgz" integrity sha512-eVRdeB+AoTNSzfc4viHfr0jfkHujSlf4ToExJtTuxS1wlgmIyyxRNrVKxbf0K78YK/TXRsRlJPoS5QCD5h1S2w== dependencies: "@smithy/core" "^1.1.0" @@ -394,7 +379,7 @@ "@aws-sdk/credential-provider-env@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.468.0.tgz#4196d717d3f5485af863bd1fd84374ea3dcd6210" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.468.0.tgz" integrity sha512-k/1WHd3KZn0EQYjadooj53FC0z24/e4dUZhbSKTULgmxyO62pwh9v3Brvw4WRa/8o2wTffU/jo54tf4vGuP/ZA== dependencies: "@aws-sdk/types" "3.468.0" @@ -404,7 +389,7 @@ "@aws-sdk/credential-provider-ini@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.474.0.tgz#b7580a9cc2242f58508817da0bf2f547be14354a" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.474.0.tgz" integrity sha512-3Y2fHI4ZCNjdOO47Vh/xBgLXOrKm3KwBkYkBKKT2g02FUGNT8NLjJg8WBo3D4RQX2h34qx4mtW5nTY6YcGP80Q== dependencies: "@aws-sdk/credential-provider-env" "3.468.0" @@ -420,7 +405,7 @@ 
"@aws-sdk/credential-provider-node@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.474.0.tgz#684786766abe2002d4f21acc202c2a1beffedec6" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.474.0.tgz" integrity sha512-3OVVVGnb8Ru5hWeeHkg76YZT5mrufweIiWr6ge5zn7FYxc7WkyqIJ0XehqUqG5VQfaYhqh7uq/zmk8OE2B04lQ== dependencies: "@aws-sdk/credential-provider-env" "3.468.0" @@ -437,7 +422,7 @@ "@aws-sdk/credential-provider-process@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.468.0.tgz#770ed72db036c5d011445e5abf4a4bcc4424c486" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.468.0.tgz" integrity sha512-OYSn1A/UsyPJ7Z8Q2cNhTf55O36shPmSsvOfND04nSfu1nPaR+VUvvsP7v+brhGpwC/GAKTIdGAo4blH31BS6A== dependencies: "@aws-sdk/types" "3.468.0" @@ -448,7 +433,7 @@ "@aws-sdk/credential-provider-sso@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.474.0.tgz#b95866e34f023493545380e0382de4372952d7a1" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.474.0.tgz" integrity sha512-ik4rzhQtcRLSHB/MLQfi/dSpILxPd3zITb79DIEnqT3gpZRNjoARkZ3Hi68pujkU2530NYf8NcFwLCWoV1hS7Q== dependencies: "@aws-sdk/client-sso" "3.474.0" @@ -461,7 +446,7 @@ "@aws-sdk/credential-provider-web-identity@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.468.0.tgz#5befcb593d99a84e16af9e9f285f0d59ed42771f" + resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.468.0.tgz" integrity sha512-rexymPmXjtkwCPfhnUq3EjO1rSkf39R4Jz9CqiM7OsqK2qlT5Y/V3gnMKn0ZMXsYaQOMfM3cT5xly5R+OKDHlw== dependencies: "@aws-sdk/types" "3.468.0" @@ -471,7 +456,7 @@ "@aws-sdk/endpoint-cache@3.465.0": version "3.465.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/endpoint-cache/-/endpoint-cache-3.465.0.tgz#c1d3d001a69d874b4d397fa5b29599bd4eea9756" + resolved "https://registry.npmjs.org/@aws-sdk/endpoint-cache/-/endpoint-cache-3.465.0.tgz" integrity sha512-0cuotk23hVSrqxHkJ3TTWC9MVMRgwlUvCatyegJEauJnk8kpLSGXE5KVdExlUBwShGNlj7ac29okZ9m17iTi5Q== dependencies: mnemonist "0.38.3" @@ -479,7 +464,7 @@ "@aws-sdk/lib-dynamodb@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.474.0.tgz#bf20c164cee589e96e98eea94325c45aa25d5799" + resolved "https://registry.npmjs.org/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.474.0.tgz" integrity sha512-uZOuqdue5b85NF5XQGR3vRlKBzUMfSab4YCHovo5E06UYwstS5KGDvjV+29uoK43QEcaGtXA9VTWJugIC6cgyA== dependencies: "@aws-sdk/util-dynamodb" "3.474.0" @@ -489,7 +474,7 @@ "@aws-sdk/middleware-bucket-endpoint@3.470.0": version "3.470.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.470.0.tgz#76a6dde27e791ec8fad798dd5d53789b876498c3" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.470.0.tgz" integrity sha512-vLXXNWtsRmEIwzJ9HUQfIuTNAsEzvCv0Icsnkvt2BiBZXnmHdp2vIC3e3+kfy1D7dVQloXqMmnfcLu/BUMu2Jw== dependencies: "@aws-sdk/types" "3.468.0" @@ -502,7 +487,7 @@ "@aws-sdk/middleware-endpoint-discovery@3.470.0": version "3.470.0" - resolved 
"https://registry.yarnpkg.com/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.470.0.tgz#7bc25fac1a334b93f6bdba62c740bf0501837422" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.470.0.tgz" integrity sha512-pN+3Y7W3Xvs6pE2RlkXmO7ugOGLXsGR3zJI/fiGOLoCOGESuM3fq3CXdasOl76wch0L9iB1lPmoHMabkxKugGQ== dependencies: "@aws-sdk/endpoint-cache" "3.465.0" @@ -514,7 +499,7 @@ "@aws-sdk/middleware-expect-continue@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.468.0.tgz#664f7f1238e7bfb633cd44753f8cfb1a62ac624a" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.468.0.tgz" integrity sha512-/wmLjmfgeulxhhmnxX3X3N933TvGsYckVIFjAtDSpLjqkbwzEcNiLq7AdmNJ4BfxG0MCMgcht561DCCD19x8Bg== dependencies: "@aws-sdk/types" "3.468.0" @@ -524,7 +509,7 @@ "@aws-sdk/middleware-flexible-checksums@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.468.0.tgz#96e26042e61724a4981edb3ba3fd2af280df57b6" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.468.0.tgz" integrity sha512-LQwL/N5MCj3Y5keLLewHTqeAXUIMsHFZyxDXRm/uxrOon9ufLKDvGvzAmfwn1/CuSUo66ZfT8VPSA4BsC90RtA== dependencies: "@aws-crypto/crc32" "3.0.0" @@ -538,7 +523,7 @@ "@aws-sdk/middleware-host-header@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.468.0.tgz#6da7b19032e9afccea54fbf8aa10cccd2f817bcf" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.468.0.tgz" integrity sha512-gwQ+/QhX+lhof304r6zbZ/V5l5cjhGRxLL3CjH1uJPMcOAbw9wUlMdl+ibr8UwBZ5elfKFGiB1cdW/0uMchw0w== dependencies: "@aws-sdk/types" "3.468.0" @@ -548,7 +533,7 @@ "@aws-sdk/middleware-location-constraint@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.468.0.tgz#cc9ebcdabed96414fc91f4a39b3b7c08e6374187" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.468.0.tgz" integrity sha512-0gBX/lDynQr4YIhM9h1dVnkVWqrg+34iOCVIUq8jHxzUzgZWglGkG9lHGGg0r1xkLTmegeoo1OKH8wrQ6n33Cg== dependencies: "@aws-sdk/types" "3.468.0" @@ -557,7 +542,7 @@ "@aws-sdk/middleware-logger@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.468.0.tgz#a1883fb7ad8e156444d30689de4ab897357ef1d8" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.468.0.tgz" integrity sha512-X5XHKV7DHRXI3f29SAhJPe/OxWRFgDWDMMCALfzhmJfCi6Jfh0M14cJKoC+nl+dk9lB+36+jKjhjETZaL2bPlA== dependencies: "@aws-sdk/types" "3.468.0" @@ -566,7 +551,7 @@ "@aws-sdk/middleware-recursion-detection@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.468.0.tgz#85b05636a5c2638bf9e15c8b6be17654757e1bf4" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.468.0.tgz" integrity sha512-vch9IQib2Ng9ucSyRW2eKNQXHUPb5jUPCLA5otTW/8nGjcOU37LxQG4WrxO7uaJ9Oe8hjHO+hViE3P0KISUhtA== dependencies: "@aws-sdk/types" "3.468.0" @@ -576,7 +561,7 @@ "@aws-sdk/middleware-sdk-s3@3.474.0": version "3.474.0" 
- resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.474.0.tgz#7af224ba8c85f0190a153f4c4e2b48e549f1f34e" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.474.0.tgz" integrity sha512-62aAo/8u5daIabeJ+gseYeHeShe9eYH6mH+kfWmLsHybXCCv1EaD/ZkdXWNhL0HZ3bUI1z1SF1p8jjTAWALnwA== dependencies: "@aws-sdk/types" "3.468.0" @@ -591,7 +576,7 @@ "@aws-sdk/middleware-signing@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-signing/-/middleware-signing-3.468.0.tgz#d1b5a92c395f55063cfa72ee95e4921b16f4c515" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.468.0.tgz" integrity sha512-s+7fSB1gdnnTj5O0aCCarX3z5Vppop8kazbNSZADdkfHIDWCN80IH4ZNjY3OWqaAz0HmR4LNNrovdR304ojb4Q== dependencies: "@aws-sdk/types" "3.468.0" @@ -604,7 +589,7 @@ "@aws-sdk/middleware-ssec@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-ssec/-/middleware-ssec-3.468.0.tgz#8fe4ccfd6f0689b77b230ce17e44438d1ce1b419" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.468.0.tgz" integrity sha512-y1qLW24wRkOGBTK5d6eJXf6d8HYo4rzT4a1mNDN1rd18NSffwQ6Yke5qeUiIaxa0y/l+FvvNYErbhYtij2rJoQ== dependencies: "@aws-sdk/types" "3.468.0" @@ -613,7 +598,7 @@ "@aws-sdk/middleware-user-agent@3.470.0": version "3.470.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.470.0.tgz#6cbb09fc8359acdb45c41f6fe5d6612c81f5ad92" + resolved "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.470.0.tgz" integrity sha512-s0YRGgf4fT5KwwTefpoNUQfB5JghzXyvmPfY1QuFEMeVQNxv0OPuydzo3rY2oXPkZjkulKDtpm5jzIHwut75hA== dependencies: "@aws-sdk/types" "3.468.0" @@ -624,7 +609,7 @@ "@aws-sdk/region-config-resolver@3.470.0": version "3.470.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/region-config-resolver/-/region-config-resolver-3.470.0.tgz#74e5c5f7a5633ad8c482503bf940a9330bd1cd09" + resolved "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.470.0.tgz" integrity sha512-C1o1J06iIw8cyAAOvHqT4Bbqf+PgQ/RDlSyjt2gFfP2OovDpc2o2S90dE8f8iZdSGpg70N5MikT1DBhW9NbhtQ== dependencies: "@smithy/node-config-provider" "^2.1.8" @@ -635,7 +620,7 @@ "@aws-sdk/signature-v4-multi-region@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.474.0.tgz#192f10924899c2ccf181932b4b5f59d6b01d79d3" + resolved "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.474.0.tgz" integrity sha512-93OWRQgTJZASXLrlUNX7mmXknNkYxFYldRLARmYQccONmnIqgYQW0lQj8BFwqkHJTzSMik3/UsU0SHKwZ9ynYA== dependencies: "@aws-sdk/middleware-sdk-s3" "3.474.0" @@ -647,7 +632,7 @@ "@aws-sdk/smithy-client@^3.374.0": version "3.374.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/smithy-client/-/smithy-client-3.374.0.tgz#e00e7d9bbf478846c0ac384e22c95159de5eab33" + resolved "https://registry.npmjs.org/@aws-sdk/smithy-client/-/smithy-client-3.374.0.tgz" integrity sha512-YQBdO/Nv5EXBg/qfMF4GgYYLNN3Y/06MyuVBYILC1TKAnMoLy2FV0VOYyediagepAcWPdJqyUq4MCNNBy0CPRg== dependencies: "@smithy/smithy-client" "^1.0.3" @@ -655,7 +640,7 @@ "@aws-sdk/token-providers@3.470.0": version "3.470.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/token-providers/-/token-providers-3.470.0.tgz#635fa5db3f10919868a9f94be43241fbce206ede" + resolved 
"https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.470.0.tgz" integrity sha512-rzxnJxEUJiV69Cxsf0AHXTqJqTACITwcSH/PL4lWP4uvtzdrzSi3KA3u2aWHWpOcdE6+JFvdICscsbBSo3/TOg== dependencies: "@aws-crypto/sha256-browser" "3.0.0" @@ -696,38 +681,39 @@ "@smithy/util-utf8" "^2.0.2" tslib "^2.5.0" -"@aws-sdk/types@3.468.0", "@aws-sdk/types@^3.468.0": +"@aws-sdk/types@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.468.0.tgz#f97b34fc92a800d1d8b866f47693ae8f3d46517b" + resolved "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz" integrity sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA== dependencies: "@smithy/types" "^2.7.0" tslib "^2.5.0" -"@aws-sdk/types@^3.222.0": - version "3.347.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.347.0.tgz#4affe91de36ef227f6375d64a6efda8d4ececd5d" - integrity sha512-GkCMy79mdjU9OTIe5KT58fI/6uqdf8UmMdWqVHmFJ+UpEzOci7L/uw4sOXWo7xpPzLs6cJ7s5ouGZW4GRPmHFA== +"@aws-sdk/types@^3.222.0", "@aws-sdk/types@^3.468.0": + version "3.692.0" + resolved "https://registry.npmjs.org/@aws-sdk/types/-/types-3.692.0.tgz" + integrity sha512-RpNvzD7zMEhiKgmlxGzyXaEcg2khvM7wd5sSHVapOcrde1awQSOMGI4zKBQ+wy5TnDfrm170ROz/ERLYtrjPZA== dependencies: - tslib "^2.5.0" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" "@aws-sdk/util-arn-parser@3.465.0": version "3.465.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-arn-parser/-/util-arn-parser-3.465.0.tgz#2896f6b06f69770378586853c97a0f283cbb2e20" + resolved "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.465.0.tgz" integrity sha512-zOJ82vzDJFqBX9yZBlNeHHrul/kpx/DCoxzW5UBbZeb26kfV53QhMSoEmY8/lEbBqlqargJ/sgRC845GFhHNQw== dependencies: tslib "^2.5.0" "@aws-sdk/util-dynamodb@3.474.0": version "3.474.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-dynamodb/-/util-dynamodb-3.474.0.tgz#635330a829f5f61ce9dfcf2010c1f6d60049f487" + resolved "https://registry.npmjs.org/@aws-sdk/util-dynamodb/-/util-dynamodb-3.474.0.tgz" integrity sha512-I4wZTpmd8UJUV6siJ4pB2dbv/RzlC8bRAqOj0m/w0ZoDGt9UpVWfC7b+s7jaGSsD8I1vuuQ/CLw58RgESX9anQ== dependencies: tslib "^2.5.0" "@aws-sdk/util-endpoints@3.470.0": version "3.470.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-endpoints/-/util-endpoints-3.470.0.tgz#94338991804f24e0225636abd4215b3bb4338c15" + resolved "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.470.0.tgz" integrity sha512-6N6VvPCmu+89p5Ez/+gLf+X620iQ9JpIs8p8ECZiCodirzFOe8NC1O2S7eov7YiG9IHSuodqn/0qNq+v+oLe0A== dependencies: "@aws-sdk/types" "3.468.0" @@ -736,14 +722,14 @@ "@aws-sdk/util-locate-window@^3.0.0": version "3.310.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.310.0.tgz#b071baf050301adee89051032bd4139bba32cc40" + resolved "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.310.0.tgz" integrity sha512-qo2t/vBTnoXpjKxlsC2e1gBrRm80M3bId27r0BRB2VniSSe7bL1mmzM+/HFtujm0iAxtPM+aLEflLJlJeDPg0w== dependencies: tslib "^2.5.0" "@aws-sdk/util-user-agent-browser@3.468.0": version "3.468.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.468.0.tgz#095caecb3fd75104ee38ae81ed78821de0f58e28" + resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.468.0.tgz" integrity sha512-OJyhWWsDEizR3L+dCgMXSUmaCywkiZ7HSbnQytbeKGwokIhD69HTiJcibF/sgcM5gk4k3Mq3puUhGnEZ46GIig== dependencies: "@aws-sdk/types" 
"3.468.0" @@ -753,7 +739,7 @@ "@aws-sdk/util-user-agent-node@3.470.0": version "3.470.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.470.0.tgz#b78605f336859d6c3b5f573cff931ce41f83a27d" + resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.470.0.tgz" integrity sha512-QxsZ9iVHcBB/XRdYvwfM5AMvNp58HfqkIrH88mY0cmxuvtlIGDfWjczdDrZMJk9y0vIq+cuoCHsGXHu7PyiEAQ== dependencies: "@aws-sdk/types" "3.468.0" @@ -763,14 +749,14 @@ "@aws-sdk/util-utf8-browser@^3.0.0": version "3.259.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz#3275a6f5eb334f96ca76635b961d3c50259fd9ff" + resolved "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz" integrity sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw== dependencies: tslib "^2.3.1" "@aws-sdk/xml-builder@3.472.0": version "3.472.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/xml-builder/-/xml-builder-3.472.0.tgz#fe804e26517779868f7093e361dce4816be546d6" + resolved "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.472.0.tgz" integrity sha512-PwjVxz1hr9up8QkddabuScPZ/d5aDHgvHYgK4acHYzltXL4wngfvimi5ZqXTzVWF2QANxHmWnHUr45QJX71oJQ== dependencies: "@smithy/types" "^2.7.0" @@ -778,47 +764,30 @@ "@azure/abort-controller@^1.0.0", "@azure/abort-controller@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@azure/abort-controller/-/abort-controller-1.1.0.tgz#788ee78457a55af8a1ad342acb182383d2119249" + resolved "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz" integrity sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw== dependencies: tslib "^2.2.0" -"@azure/abort-controller@^2.0.0": - version "2.1.1" - resolved "https://registry.yarnpkg.com/@azure/abort-controller/-/abort-controller-2.1.1.tgz#ad4a964ce50a1eaed70ed2d2ef77c8de5708d10b" - integrity sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ== - dependencies: - tslib "^2.6.2" - -"@azure/core-auth@^1.1.4": - version "1.7.1" - resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.7.1.tgz#ca75bc663b6463602fb10471db60f09368a1a3d2" - integrity sha512-dyeQwvgthqs/SlPVQbZQetpslXceHd4i5a7M/7z/lGEAVwnSluabnQOjF2/dk/hhWgMISusv1Ytp4mQ8JNy62A== +"@azure/abort-controller@^2.0.0", "@azure/abort-controller@^2.1.2": + version "2.1.2" + resolved "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz" + integrity sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA== dependencies: - "@azure/abort-controller" "^2.0.0" - "@azure/core-util" "^1.1.0" tslib "^2.6.2" -"@azure/core-auth@^1.3.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.4.0.tgz#6fa9661c1705857820dbc216df5ba5665ac36a9e" - integrity sha512-HFrcTgmuSuukRf/EdPmqBrc5l6Q5Uu+2TbuhaKbgaCpP2TfAeiNaQPAadxO+CYBRHGUzIDteMAjFspFLDLnKVQ== - dependencies: - "@azure/abort-controller" "^1.0.0" - tslib "^2.2.0" - -"@azure/core-auth@^1.4.0", "@azure/core-auth@^1.5.0": +"@azure/core-auth@^1.1.4", "@azure/core-auth@^1.3.0", "@azure/core-auth@^1.4.0", "@azure/core-auth@^1.5.0": version "1.5.0" - resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.5.0.tgz#a41848c5c31cb3b7c84c409885267d55a2c92e44" + resolved "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.5.0.tgz" integrity 
sha512-udzoBuYG1VBoHVohDTrvKjyzel34zt77Bhp7dQntVGGD0ehVq48owENbBG8fIgkHRNUBQH5k1r0hpoMu5L8+kw== dependencies: "@azure/abort-controller" "^1.0.0" "@azure/core-util" "^1.1.0" tslib "^2.2.0" -"@azure/core-client@^1.4.0": +"@azure/core-client@^1.3.0", "@azure/core-client@^1.4.0", "@azure/core-client@^1.6.2": version "1.7.3" - resolved "https://registry.yarnpkg.com/@azure/core-client/-/core-client-1.7.3.tgz#f8cb2a1f91e8bc4921fa2e745cfdfda3e6e491a3" + resolved "https://registry.npmjs.org/@azure/core-client/-/core-client-1.7.3.tgz" integrity sha512-kleJ1iUTxcO32Y06dH9Pfi9K4U+Tlb111WXEnbt7R/ne+NLRwppZiTGJuTD5VVoxTMK5NTbEtm5t2vcdNCFe2g== dependencies: "@azure/abort-controller" "^1.0.0" @@ -829,46 +798,35 @@ "@azure/logger" "^1.0.0" tslib "^2.2.0" -"@azure/core-http@^3.0.0": - version "3.0.2" - resolved "https://registry.yarnpkg.com/@azure/core-http/-/core-http-3.0.2.tgz#970c5a0ee27884d60a406eeec17a271413e45ff4" - integrity sha512-o1wR9JrmoM0xEAa0Ue7Sp8j+uJvmqYaGoHOCT5qaVYmvgmnZDC0OvQimPA/JR3u77Sz6D1y3Xmk1y69cDU9q9A== +"@azure/core-http-compat@^2.0.0": + version "2.1.2" + resolved "https://registry.npmjs.org/@azure/core-http-compat/-/core-http-compat-2.1.2.tgz" + integrity sha512-5MnV1yqzZwgNLLjlizsU3QqOeQChkIXw781Fwh1xdAqJR5AA32IUaq6xv1BICJvfbHoa+JYcaij2HFkhLbNTJQ== dependencies: - "@azure/abort-controller" "^1.0.0" - "@azure/core-auth" "^1.3.0" - "@azure/core-tracing" "1.0.0-preview.13" - "@azure/core-util" "^1.1.1" - "@azure/logger" "^1.0.0" - "@types/node-fetch" "^2.5.0" - "@types/tunnel" "^0.0.3" - form-data "^4.0.0" - node-fetch "^2.6.7" - process "^0.11.10" - tslib "^2.2.0" - tunnel "^0.0.6" - uuid "^8.3.0" - xml2js "^0.5.0" + "@azure/abort-controller" "^2.0.0" + "@azure/core-client" "^1.3.0" + "@azure/core-rest-pipeline" "^1.3.0" "@azure/core-lro@^2.2.0": - version "2.5.3" - resolved "https://registry.yarnpkg.com/@azure/core-lro/-/core-lro-2.5.3.tgz#6bb74e76dd84071d319abf7025e8abffef091f91" - integrity sha512-ubkOf2YCnVtq7KqEJQqAI8dDD5rH1M6OP5kW0KO/JQyTaxLA0N0pjFWvvaysCj9eHMNBcuuoZXhhl0ypjod2DA== + version "2.7.2" + resolved "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz" + integrity sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw== dependencies: - "@azure/abort-controller" "^1.0.0" + "@azure/abort-controller" "^2.0.0" "@azure/core-util" "^1.2.0" "@azure/logger" "^1.0.0" - tslib "^2.2.0" + tslib "^2.6.2" "@azure/core-paging@^1.1.1": - version "1.5.0" - resolved "https://registry.yarnpkg.com/@azure/core-paging/-/core-paging-1.5.0.tgz#5a5b09353e636072e6a7fc38f7879e11d0afb15f" - integrity sha512-zqWdVIt+2Z+3wqxEOGzR5hXFZ8MGKK52x4vFLw8n58pR6ZfKRx3EXYTxTaYxYHc/PexPUTyimcTWFJbji9Z6Iw== + version "1.6.2" + resolved "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz" + integrity sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA== dependencies: - tslib "^2.2.0" + tslib "^2.6.2" -"@azure/core-rest-pipeline@^1.1.0", "@azure/core-rest-pipeline@^1.2.0", "@azure/core-rest-pipeline@^1.9.1": +"@azure/core-rest-pipeline@^1.1.0", "@azure/core-rest-pipeline@^1.10.1", "@azure/core-rest-pipeline@^1.2.0", "@azure/core-rest-pipeline@^1.3.0", "@azure/core-rest-pipeline@^1.9.1": version "1.13.0" - resolved "https://registry.yarnpkg.com/@azure/core-rest-pipeline/-/core-rest-pipeline-1.13.0.tgz#770b003c351b4869e3f1c85800bacb947c98cd33" + resolved "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.13.0.tgz" integrity 
sha512-a62aP/wppgmnfIkJLfcB4ssPBcH94WzrzPVJ3tlJt050zX4lfmtnvy95D3igDo3f31StO+9BgPrzvkj4aOxnoA== dependencies: "@azure/abort-controller" "^1.1.0" @@ -880,48 +838,32 @@ https-proxy-agent "^5.0.0" tslib "^2.2.0" -"@azure/core-tracing@1.0.0-preview.13": - version "1.0.0-preview.13" - resolved "https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz#55883d40ae2042f6f1e12b17dd0c0d34c536d644" - integrity sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ== - dependencies: - "@opentelemetry/api" "^1.0.1" - tslib "^2.2.0" - -"@azure/core-tracing@^1.0.0", "@azure/core-tracing@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.1.tgz#352a38cbea438c4a83c86b314f48017d70ba9503" - integrity sha512-I5CGMoLtX+pI17ZdiFJZgxMJApsK6jjfm85hpgp3oazCdq5Wxgh4wMr7ge/TTWW1B5WBuvIOI1fMU/FrOAMKrw== +"@azure/core-tracing@^1.0.0", "@azure/core-tracing@^1.0.1", "@azure/core-tracing@^1.1.2": + version "1.2.0" + resolved "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.2.0.tgz" + integrity sha512-UKTiEJPkWcESPYJz3X5uKRYyOcJD+4nYph+KpfdPRnQJVrZfk0KJgdnaAWKfhsBBtAf/D58Az4AvCJEmWgIBAg== dependencies: - tslib "^2.2.0" + tslib "^2.6.2" -"@azure/core-util@^1.0.0", "@azure/core-util@^1.3.0": +"@azure/core-util@^1.0.0", "@azure/core-util@^1.1.0", "@azure/core-util@^1.2.0", "@azure/core-util@^1.3.0", "@azure/core-util@^1.6.1": version "1.6.1" - resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.6.1.tgz#fea221c4fa43c26543bccf799beb30c1c7878f5a" + resolved "https://registry.npmjs.org/@azure/core-util/-/core-util-1.6.1.tgz" integrity sha512-h5taHeySlsV9qxuK64KZxy4iln1BtMYlNt5jbuEFN3UFSAd1EwKg/Gjl5a6tZ/W8t6li3xPnutOx7zbDyXnPmQ== dependencies: "@azure/abort-controller" "^1.0.0" tslib "^2.2.0" -"@azure/core-util@^1.1.0": - version "1.8.1" - resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.8.1.tgz#4a14ddb338dc1acf2ea7628b5b1cccdb5b6fbfbf" - integrity sha512-L3voj0StUdJ+YKomvwnTv7gHzguJO+a6h30pmmZdRprJCM+RJlGMPxzuh4R7lhQu1jNmEtaHX5wvTgWLDAmbGQ== +"@azure/core-xml@^1.4.3": + version "1.4.4" + resolved "https://registry.npmjs.org/@azure/core-xml/-/core-xml-1.4.4.tgz" + integrity sha512-J4FYAqakGXcbfeZjwjMzjNcpcH4E+JtEBv+xcV1yL0Ydn/6wbQfeFKTCHh9wttAi0lmajHw7yBbHPRG+YHckZQ== dependencies: - "@azure/abort-controller" "^2.0.0" + fast-xml-parser "^4.4.1" tslib "^2.6.2" -"@azure/core-util@^1.1.1", "@azure/core-util@^1.2.0": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.3.2.tgz#3f8cfda1e87fac0ce84f8c1a42fcd6d2a986632d" - integrity sha512-2bECOUh88RvL1pMZTcc6OzfobBeWDBf5oBbhjIhT1MV9otMVWCzpOJkkiKtrnO88y5GGBelgY8At73KGAdbkeQ== - dependencies: - "@azure/abort-controller" "^1.0.0" - tslib "^2.2.0" - "@azure/cosmos@^4.0.0": version "4.0.0" - resolved "https://registry.yarnpkg.com/@azure/cosmos/-/cosmos-4.0.0.tgz#5fda8b35cb62bbcda52159b96c4c3981a843d5b9" + resolved "https://registry.npmjs.org/@azure/cosmos/-/cosmos-4.0.0.tgz" integrity sha512-/Z27p1+FTkmjmm8jk90zi/HrczPHw2t8WecFnsnTe4xGocWl0Z4clP0YlLUTJPhRLWYa5upwD9rMvKJkS1f1kg== dependencies: "@azure/abort-controller" "^1.0.0" @@ -940,7 +882,7 @@ "@azure/identity@^4.0.0": version "4.0.0" - resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-4.0.0.tgz#87d4c7b745f0855b83993518f1c7782dfb258bba" + resolved "https://registry.npmjs.org/@azure/identity/-/identity-4.0.0.tgz" integrity 
sha512-gtPYxIL0kI39Dw4t3HvlbfhOdXqKD2MqDgynlklF0j728j51dcKgRo6FLX0QzpBw/1gGfLxjMXqq3nKOSQ2lmA== dependencies: "@azure/abort-controller" "^1.0.0" @@ -960,14 +902,14 @@ "@azure/logger@^1.0.0": version "1.0.4" - resolved "https://registry.yarnpkg.com/@azure/logger/-/logger-1.0.4.tgz#28bc6d0e5b3c38ef29296b32d35da4e483593fa1" + resolved "https://registry.npmjs.org/@azure/logger/-/logger-1.0.4.tgz" integrity sha512-ustrPY8MryhloQj7OWGe+HrYx+aoiOxzbXTtgblbV3xwCqpzUK36phH3XNHQKj3EPonyFUuDTfR3qFhTEAuZEg== dependencies: tslib "^2.2.0" "@azure/ms-rest-js@^2.6.0": version "2.7.0" - resolved "https://registry.yarnpkg.com/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz#8639065577ffdf4946951e1d246334ebfd72d537" + resolved "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz" integrity sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA== dependencies: "@azure/core-auth" "^1.1.4" @@ -981,385 +923,350 @@ "@azure/msal-browser@^3.5.0": version "3.6.0" - resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-3.6.0.tgz#06ad33c82af4f24f2fcf8a8a61a3a1cbe3dd7fad" + resolved "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-3.6.0.tgz" integrity sha512-FrFBJXRJMyWXjAjg4cUNZwEKktzfzD/YD9+S1kj2ors67hKoveam4aL0bZuCZU/jTiHTn0xDQGQh2ksCMXTXtA== dependencies: "@azure/msal-common" "14.5.0" "@azure/msal-common@14.5.0": version "14.5.0" - resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-14.5.0.tgz#5a891e5f8eaf23f598bdb4e285e938fb606cd716" + resolved "https://registry.npmjs.org/@azure/msal-common/-/msal-common-14.5.0.tgz" integrity sha512-Gx5rZbiZV/HiZ2nEKfjfAF/qDdZ4/QWxMvMo2jhIFVz528dVKtaZyFAOtsX2Ak8+TQvRsGCaEfuwJFuXB6tu1A== "@azure/msal-node@^2.5.1": version "2.6.0" - resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-2.6.0.tgz#44bad20a1523c92d79de24fb225da86abaeedb6c" + resolved "https://registry.npmjs.org/@azure/msal-node/-/msal-node-2.6.0.tgz" integrity sha512-RWAWCYYrSldIYC47oWtofIun41e6SB9TBYgGYsezq6ednagwo9ZRFyRsvl1NabmdTkdDDXRAABIdveeN2Gtd8w== dependencies: "@azure/msal-common" "14.5.0" jsonwebtoken "^9.0.0" uuid "^8.3.0" -"@azure/storage-blob@^12.13.0": - version "12.14.0" - resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.14.0.tgz#32d3e5fa3bb2a12d5d44b186aed11c8e78f00178" - integrity sha512-g8GNUDpMisGXzBeD+sKphhH5yLwesB4JkHr1U6be/X3F+cAMcyGLPD1P89g2M7wbEtUJWoikry1rlr83nNRBzg== - dependencies: - "@azure/abort-controller" "^1.0.0" - "@azure/core-http" "^3.0.0" - "@azure/core-lro" "^2.2.0" - "@azure/core-paging" "^1.1.1" - "@azure/core-tracing" "1.0.0-preview.13" - "@azure/logger" "^1.0.0" - events "^3.0.0" - tslib "^2.2.0" - -"@azure/storage-blob@^12.17.0": - version "12.17.0" - resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.17.0.tgz#04aad7f59cb08dbbe5b1b672a9f5b6256c8c9006" - integrity sha512-sM4vpsCpcCApagRW5UIjQNlNylo02my2opgp0Emi8x888hZUvJ3dN69Oq20cEGXkMUWnoCrBaB0zyS3yeB87sQ== +"@azure/storage-blob@^12.13.0", "@azure/storage-blob@^12.17.0": + version "12.25.0" + resolved "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.25.0.tgz" + integrity sha512-oodouhA3nCCIh843tMMbxty3WqfNT+Vgzj3Xo5jqR9UPnzq3d7mzLjlHAYz7lW+b4km3SIgz+NAgztvhm7Z6kQ== dependencies: - "@azure/abort-controller" "^1.0.0" - "@azure/core-http" "^3.0.0" + "@azure/abort-controller" "^2.1.2" + "@azure/core-auth" "^1.4.0" + "@azure/core-client" "^1.6.2" + "@azure/core-http-compat" "^2.0.0" "@azure/core-lro" "^2.2.0" "@azure/core-paging" "^1.1.1" - 
"@azure/core-tracing" "1.0.0-preview.13" + "@azure/core-rest-pipeline" "^1.10.1" + "@azure/core-tracing" "^1.1.2" + "@azure/core-util" "^1.6.1" + "@azure/core-xml" "^1.4.3" "@azure/logger" "^1.0.0" events "^3.0.0" tslib "^2.2.0" -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.5.tgz#234d98e1551960604f1246e6475891a570ad5658" - integrity sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ== +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.25.9", "@babel/code-frame@^7.26.0": + version "7.26.2" + resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz" + integrity sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ== dependencies: - "@babel/highlight" "^7.22.5" + "@babel/helper-validator-identifier" "^7.25.9" + js-tokens "^4.0.0" + picocolors "^1.0.0" -"@babel/compat-data@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.5.tgz#b1f6c86a02d85d2dd3368a2b67c09add8cd0c255" - integrity sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA== +"@babel/compat-data@^7.25.9": + version "7.26.2" + resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.2.tgz" + integrity sha512-Z0WgzSEa+aUcdiJuCIqgujCshpMWgUpgOxXotrYPSA53hA3qopNaqcJpyr0hVb1FeWdnqFA35/fUtXgBK8srQg== -"@babel/core@^7.11.6", "@babel/core@^7.12.3": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.5.tgz#d67d9747ecf26ee7ecd3ebae1ee22225fe902a89" - integrity sha512-SBuTAjg91A3eKOvD+bPEz3LlhHZRNu1nFOVts9lzDJTXshHTjII0BAtDS3Y2DAkdZdDKWVZGVwkDfc4Clxn1dg== +"@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.23.9": + version "7.26.0" + resolved "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz" + integrity sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg== dependencies: "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.22.5" - "@babel/generator" "^7.22.5" - "@babel/helper-compilation-targets" "^7.22.5" - "@babel/helper-module-transforms" "^7.22.5" - "@babel/helpers" "^7.22.5" - "@babel/parser" "^7.22.5" - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" - convert-source-map "^1.7.0" + "@babel/code-frame" "^7.26.0" + "@babel/generator" "^7.26.0" + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-module-transforms" "^7.26.0" + "@babel/helpers" "^7.26.0" + "@babel/parser" "^7.26.0" + "@babel/template" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.26.0" + convert-source-map "^2.0.0" debug "^4.1.0" gensync "^1.0.0-beta.2" - json5 "^2.2.2" - semver "^6.3.0" + json5 "^2.2.3" + semver "^6.3.1" + +"@babel/generator@^7.25.9", "@babel/generator@^7.26.0", "@babel/generator@^7.7.2": + version "7.26.2" + resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.26.2.tgz" + integrity sha512-zevQbhbau95nkoxSq3f/DC/SC+EEOUZd3DYqfSkMhY2/wfSeaHV1Ew4vk8e+x8lja31IbyuUa2uQ3JONqKbysw== + dependencies: + "@babel/parser" "^7.26.2" + "@babel/types" "^7.26.0" + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + jsesc "^3.0.2" + +"@babel/helper-compilation-targets@^7.25.9": + version "7.25.9" + resolved 
"https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.9.tgz" + integrity sha512-j9Db8Suy6yV/VHa4qzrj9yZfZxhLWQdVnRlXxmKLYlhWUVB1sB2G5sxuWYXk/whHD9iW76PmNzxZ4UCnTQTVEQ== + dependencies: + "@babel/compat-data" "^7.25.9" + "@babel/helper-validator-option" "^7.25.9" + browserslist "^4.24.0" + lru-cache "^5.1.1" + semver "^6.3.1" -"@babel/generator@^7.22.5", "@babel/generator@^7.7.2": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.5.tgz#1e7bf768688acfb05cf30b2369ef855e82d984f7" - integrity sha512-+lcUbnTRhd0jOewtFSedLyiPsD5tswKkbgcezOqqWFUVNEwoUTlpPOBmvhG7OXWLR4jMdv0czPGH5XbflnD1EA== +"@babel/helper-module-imports@^7.25.9": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz" + integrity sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw== dependencies: - "@babel/types" "^7.22.5" - "@jridgewell/gen-mapping" "^0.3.2" - "@jridgewell/trace-mapping" "^0.3.17" - jsesc "^2.5.1" + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" -"@babel/helper-compilation-targets@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.5.tgz#fc7319fc54c5e2fa14b2909cf3c5fd3046813e02" - integrity sha512-Ji+ywpHeuqxB8WDxraCiqR0xfhYjiDE/e6k7FuIaANnoOFxAHskHChz4vA1mJC9Lbm01s1PVAGhQY4FUKSkGZw== +"@babel/helper-module-transforms@^7.26.0": + version "7.26.0" + resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz" + integrity sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw== dependencies: - "@babel/compat-data" "^7.22.5" - "@babel/helper-validator-option" "^7.22.5" - browserslist "^4.21.3" - lru-cache "^5.1.1" - semver "^6.3.0" + "@babel/helper-module-imports" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + "@babel/traverse" "^7.25.9" -"@babel/helper-environment-visitor@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" - integrity sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q== - -"@babel/helper-function-name@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz#ede300828905bb15e582c037162f99d5183af1be" - integrity sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ== - dependencies: - "@babel/template" "^7.22.5" - "@babel/types" "^7.22.5" - -"@babel/helper-hoist-variables@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" - integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-module-imports@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz#1a8f4c9f4027d23f520bd76b364d44434a72660c" - integrity sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-module-transforms@^7.22.5": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz#0f65daa0716961b6e96b164034e737f60a80d2ef" - integrity sha512-+hGKDt/Ze8GFExiVHno/2dvG5IdstpzCq0y4Qc9OJ25D4q3pKfiIP/4Vp3/JvhDkLKsDK2api3q3fpIgiIF5bw== - dependencies: - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-module-imports" "^7.22.5" - "@babel/helper-simple-access" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.5" - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" - integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== - -"@babel/helper-simple-access@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" - integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-split-export-declaration@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.5.tgz#88cf11050edb95ed08d596f7a044462189127a08" - integrity sha512-thqK5QFghPKWLhAV321lxF95yCg2K3Ob5yw+M3VHWfdia0IkPXUtoLH8x/6Fh486QUvzhb8YOWHChTVen2/PoQ== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-string-parser@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" - integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== - -"@babel/helper-validator-identifier@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" - integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== - -"@babel/helper-validator-option@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" - integrity sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw== - -"@babel/helpers@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.22.5.tgz#74bb4373eb390d1ceed74a15ef97767e63120820" - integrity sha512-pSXRmfE1vzcUIDFQcSGA5Mr+GxBV9oiRKDuDxXvWQQBCh8HoIjs/2DlDB7H8smac1IVrB9/xdXj2N3Wol9Cr+Q== - dependencies: - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" - -"@babel/highlight@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.5.tgz#aa6c05c5407a67ebce408162b7ede789b4d22031" - integrity sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw== - dependencies: - "@babel/helper-validator-identifier" "^7.22.5" - chalk "^2.0.0" - 
js-tokens "^4.0.0" +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.25.9", "@babel/helper-plugin-utils@^7.8.0": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.25.9.tgz" + integrity sha512-kSMlyUVdWe25rEsRGviIgOWnoT/nfABVWlqt9N19/dIPWViAOW2s9wznP5tURbs/IDuNk4gPy3YdYRgH3uxhBw== + +"@babel/helper-string-parser@^7.25.9": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz" + integrity sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA== + +"@babel/helper-validator-identifier@^7.25.9": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz" + integrity sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ== + +"@babel/helper-validator-option@^7.25.9": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz" + integrity sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw== -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.5.tgz#721fd042f3ce1896238cf1b341c77eb7dee7dbea" - integrity sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q== +"@babel/helpers@^7.26.0": + version "7.26.0" + resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.0.tgz" + integrity sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw== + dependencies: + "@babel/template" "^7.25.9" + "@babel/types" "^7.26.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.23.9", "@babel/parser@^7.25.9", "@babel/parser@^7.26.0", "@babel/parser@^7.26.2": + version "7.26.2" + resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.26.2.tgz" + integrity sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ== + dependencies: + "@babel/types" "^7.26.0" "@babel/plugin-syntax-async-generators@^7.8.4": version "7.8.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz" integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-bigint@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz" integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-class-properties@^7.8.3": +"@babel/plugin-syntax-class-properties@^7.12.13": version "7.12.13" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz" integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-syntax-import-meta@^7.8.3": +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-import-attributes@^7.24.7": + version "7.26.0" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz" + integrity sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-syntax-import-meta@^7.10.4": version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz" integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-json-strings@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz" integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-jsx@^7.7.2": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz#a6b68e84fb76e759fc3b93e901876ffabbe1d918" - integrity sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg== + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz" + integrity sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4": version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": version "7.8.3" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz" integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-numeric-separator@^7.8.3": +"@babel/plugin-syntax-numeric-separator@^7.10.4": version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz" integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-object-rest-spread@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz" integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-optional-catch-binding@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz" integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-optional-chaining@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz" integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-top-level-await@^7.8.3": +"@babel/plugin-syntax-private-property-in-object@^7.14.5": version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5": + version "7.14.5" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz" integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== dependencies: "@babel/helper-plugin-utils" "^7.14.5" 
"@babel/plugin-syntax-typescript@^7.7.2": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz#aac8d383b062c5072c647a31ef990c1d0af90272" - integrity sha512-1mS2o03i7t1c6VzH6fdQ3OA8tcEIxwG18zIPRp+UY1Ihv6W+XZzBCVxExF9upussPXJ0xE9XRHwMoNs1ep/nRQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/template@^7.22.5", "@babel/template@^7.3.3": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.5.tgz#0c8c4d944509875849bd0344ff0050756eefc6ec" - integrity sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw== - dependencies: - "@babel/code-frame" "^7.22.5" - "@babel/parser" "^7.22.5" - "@babel/types" "^7.22.5" - -"@babel/traverse@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.5.tgz#44bd276690db6f4940fdb84e1cb4abd2f729ccd1" - integrity sha512-7DuIjPgERaNo6r+PZwItpjCZEa5vyw4eJGufeLxrPdBXBoLcCJCIasvK6pK/9DVNrLZTLFhUGqaC6X/PA007TQ== - dependencies: - "@babel/code-frame" "^7.22.5" - "@babel/generator" "^7.22.5" - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-function-name" "^7.22.5" - "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.5" - "@babel/parser" "^7.22.5" - "@babel/types" "^7.22.5" - debug "^4.1.0" + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz" + integrity sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/template@^7.25.9", "@babel/template@^7.3.3": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz" + integrity sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg== + dependencies: + "@babel/code-frame" "^7.25.9" + "@babel/parser" "^7.25.9" + "@babel/types" "^7.25.9" + +"@babel/traverse@^7.25.9": + version "7.25.9" + resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.9.tgz" + integrity sha512-ZCuvfwOwlz/bawvAuvcj8rrithP2/N55Tzz342AkTvq4qaWbGfmCk/tKhNaV2cthijKrPAA8SRJV5WWe7IBMJw== + dependencies: + "@babel/code-frame" "^7.25.9" + "@babel/generator" "^7.25.9" + "@babel/parser" "^7.25.9" + "@babel/template" "^7.25.9" + "@babel/types" "^7.25.9" + debug "^4.3.1" globals "^11.1.0" -"@babel/types@^7.0.0", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.3": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" - integrity sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA== +"@babel/types@^7.0.0", "@babel/types@^7.20.7", "@babel/types@^7.25.9", "@babel/types@^7.26.0", "@babel/types@^7.3.3": + version "7.26.0" + resolved "https://registry.npmjs.org/@babel/types/-/types-7.26.0.tgz" + integrity sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA== dependencies: - "@babel/helper-string-parser" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.5" - to-fast-properties "^2.0.0" + "@babel/helper-string-parser" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" "@bcoe/v8-coverage@^0.2.3": version "0.2.3" - resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + resolved 
"https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== "@bundled-es-modules/cookie@^2.0.0": version "2.0.0" - resolved "https://registry.yarnpkg.com/@bundled-es-modules/cookie/-/cookie-2.0.0.tgz#c3b82703969a61cf6a46e959a012b2c257f6b164" + resolved "https://registry.npmjs.org/@bundled-es-modules/cookie/-/cookie-2.0.0.tgz" integrity sha512-Or6YHg/kamKHpxULAdSqhGqnWFneIXu1NKvvfBBzKGwpVsYuFIQ5aBPHDnnoR3ghW1nvSkALd+EF9iMtY7Vjxw== dependencies: cookie "^0.5.0" "@bundled-es-modules/js-levenshtein@^2.0.1": version "2.0.1" - resolved "https://registry.yarnpkg.com/@bundled-es-modules/js-levenshtein/-/js-levenshtein-2.0.1.tgz#b02bbbd546358ab77080a430f0911cfc2b3779c4" + resolved "https://registry.npmjs.org/@bundled-es-modules/js-levenshtein/-/js-levenshtein-2.0.1.tgz" integrity sha512-DERMS3yfbAljKsQc0U2wcqGKUWpdFjwqWuoMugEJlqBnKO180/n+4SR/J8MRDt1AN48X1ovgoD9KrdVXcaa3Rg== dependencies: js-levenshtein "^1.1.6" "@bundled-es-modules/statuses@^1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@bundled-es-modules/statuses/-/statuses-1.0.1.tgz#761d10f44e51a94902c4da48675b71a76cc98872" + resolved "https://registry.npmjs.org/@bundled-es-modules/statuses/-/statuses-1.0.1.tgz" integrity sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg== dependencies: statuses "^2.0.1" "@cspotcode/source-map-support@^0.8.0": version "0.8.1" - resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + resolved "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz" integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== dependencies: "@jridgewell/trace-mapping" "0.3.9" "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" - resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" + resolved "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz" integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== dependencies: eslint-visitor-keys "^3.3.0" "@eslint-community/regexpp@^4.5.1", "@eslint-community/regexpp@^4.6.1": version "4.10.0" - resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.10.0.tgz#548f6de556857c8bb73bbee70c35dc82a2e74d63" + resolved "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz" integrity sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA== "@eslint/eslintrc@^2.1.4": version "2.1.4" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.4.tgz#388a269f0f25c1b6adc317b5a2c55714894c70ad" + resolved "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz" integrity sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ== dependencies: ajv "^6.12.4" @@ -1374,17 +1281,85 @@ "@eslint/js@8.56.0": version "8.56.0" - resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.56.0.tgz#ef20350fec605a7f7035a01764731b2de0f3782b" + resolved "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz" integrity sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A== "@fastify/busboy@^2.0.0": - 
version "2.1.0" - resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.0.tgz#0709e9f4cb252351c609c6e6d8d6779a8d25edff" - integrity sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA== + version "2.1.1" + resolved "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz" + integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA== + +"@google-cloud/firestore@^7.10.0": + version "7.10.0" + resolved "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.10.0.tgz" + integrity sha512-VFNhdHvfnmqcHHs6YhmSNHHxQqaaD64GwiL0c+e1qz85S8SWZPC2XFRf8p9yHRTF40Kow424s1KBU9f0fdQa+Q== + dependencies: + "@opentelemetry/api" "^1.3.0" + fast-deep-equal "^3.1.1" + functional-red-black-tree "^1.0.1" + google-gax "^4.3.3" + protobufjs "^7.2.6" + +"@google-cloud/paginator@^5.0.0": + version "5.0.2" + resolved "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz" + integrity sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg== + dependencies: + arrify "^2.0.0" + extend "^3.0.2" + +"@google-cloud/projectify@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz" + integrity sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA== + +"@google-cloud/promisify@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz" + integrity sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g== + +"@google-cloud/storage@^7.14.0": + version "7.14.0" + resolved "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.14.0.tgz" + integrity sha512-H41bPL2cMfSi4EEnFzKvg7XSb7T67ocSXrmF7MPjfgFB0L6CKGzfIYJheAZi1iqXjz6XaCT1OBf6HCG5vDBTOQ== + dependencies: + "@google-cloud/paginator" "^5.0.0" + "@google-cloud/projectify" "^4.0.0" + "@google-cloud/promisify" "^4.0.0" + abort-controller "^3.0.0" + async-retry "^1.3.3" + duplexify "^4.1.3" + fast-xml-parser "^4.4.1" + gaxios "^6.0.2" + google-auth-library "^9.6.3" + html-entities "^2.5.2" + mime "^3.0.0" + p-limit "^3.0.1" + retry-request "^7.0.0" + teeny-request "^9.0.0" + uuid "^8.0.0" + +"@grpc/grpc-js@^1.10.9": + version "1.12.2" + resolved "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.2.tgz" + integrity sha512-bgxdZmgTrJZX50OjyVwz3+mNEnCTNkh3cIqGPWVNeW9jX6bn1ZkU80uPd+67/ZpIJIjRQ9qaHCjhavyoWYxumg== + dependencies: + "@grpc/proto-loader" "^0.7.13" + "@js-sdsl/ordered-map" "^4.4.2" + +"@grpc/proto-loader@^0.7.13": + version "0.7.13" + resolved "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz" + integrity sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw== + dependencies: + lodash.camelcase "^4.3.0" + long "^5.0.0" + protobufjs "^7.2.5" + yargs "^17.7.2" "@humanwhocodes/config-array@^0.11.13": version "0.11.13" - resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.13.tgz#075dc9684f40a531d9b26b0822153c1e832ee297" + resolved "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz" integrity sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ== dependencies: "@humanwhocodes/object-schema" "^2.0.1" @@ -1393,17 +1368,17 @@ "@humanwhocodes/module-importer@^1.0.1": version "1.0.1" - resolved 
"https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + resolved "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz" integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== "@humanwhocodes/object-schema@^2.0.1": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz#e5211452df060fa8522b55c7b3c0c4d1981cb044" - integrity sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw== + version "2.0.3" + resolved "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz" + integrity sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA== "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + resolved "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz" integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== dependencies: camelcase "^5.3.1" @@ -1412,14 +1387,14 @@ js-yaml "^3.13.1" resolve-from "^5.0.0" -"@istanbuljs/schema@^0.1.2": +"@istanbuljs/schema@^0.1.2", "@istanbuljs/schema@^0.1.3": version "0.1.3" - resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + resolved "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz" integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== "@jest/console@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-29.7.0.tgz#cd4822dbdb84529265c5a2bdb529a3c9cc950ffc" + resolved "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz" integrity sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg== dependencies: "@jest/types" "^29.6.3" @@ -1431,7 +1406,7 @@ "@jest/core@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/core/-/core-29.7.0.tgz#b6cccc239f30ff36609658c5a5e2291757ce448f" + resolved "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz" integrity sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg== dependencies: "@jest/console" "^29.7.0" @@ -1465,7 +1440,7 @@ "@jest/environment@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-29.7.0.tgz#24d61f54ff1f786f3cd4073b4b94416383baf2a7" + resolved "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz" integrity sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw== dependencies: "@jest/fake-timers" "^29.7.0" @@ -1473,23 +1448,16 @@ "@types/node" "*" jest-mock "^29.7.0" -"@jest/expect-utils@^29.5.0": - version "29.5.0" - resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.5.0.tgz#f74fad6b6e20f924582dc8ecbf2cb800fe43a036" - integrity sha512-fmKzsidoXQT2KwnrwE0SQq3uj8Z763vzR8LnLBwC2qYWEFpjX8daRsk6rHUM1QvNlEW/UJXNXm59ztmJJWs2Mg== - dependencies: - jest-get-type "^29.4.3" - "@jest/expect-utils@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.7.0.tgz#023efe5d26a8a70f21677d0a1afc0f0a44e3a1c6" + resolved 
"https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz" integrity sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA== dependencies: jest-get-type "^29.6.3" "@jest/expect@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/expect/-/expect-29.7.0.tgz#76a3edb0cb753b70dfbfe23283510d3d45432bf2" + resolved "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz" integrity sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ== dependencies: expect "^29.7.0" @@ -1497,7 +1465,7 @@ "@jest/fake-timers@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-29.7.0.tgz#fd91bf1fffb16d7d0d24a426ab1a47a49881a565" + resolved "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz" integrity sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ== dependencies: "@jest/types" "^29.6.3" @@ -1509,7 +1477,7 @@ "@jest/globals@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-29.7.0.tgz#8d9290f9ec47ff772607fa864ca1d5a2efae1d4d" + resolved "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz" integrity sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ== dependencies: "@jest/environment" "^29.7.0" @@ -1519,7 +1487,7 @@ "@jest/reporters@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-29.7.0.tgz#04b262ecb3b8faa83b0b3d321623972393e8f4c7" + resolved "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz" integrity sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg== dependencies: "@bcoe/v8-coverage" "^0.2.3" @@ -1547,23 +1515,16 @@ strip-ansi "^6.0.0" v8-to-istanbul "^9.0.1" -"@jest/schemas@^29.4.3": - version "29.4.3" - resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.4.3.tgz#39cf1b8469afc40b6f5a2baaa146e332c4151788" - integrity sha512-VLYKXQmtmuEz6IxJsrZwzG9NvtkQsWNnWMsKxqWNu3+CnfzJQhp0WDDKWLVV9hLKr0l3SLLFRqcYHjhtyuDVxg== - dependencies: - "@sinclair/typebox" "^0.25.16" - "@jest/schemas@^29.6.3": version "29.6.3" - resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03" + resolved "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz" integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA== dependencies: "@sinclair/typebox" "^0.27.8" "@jest/source-map@^29.6.3": version "29.6.3" - resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-29.6.3.tgz#d90ba772095cf37a34a5eb9413f1b562a08554c4" + resolved "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz" integrity sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw== dependencies: "@jridgewell/trace-mapping" "^0.3.18" @@ -1572,7 +1533,7 @@ "@jest/test-result@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-29.7.0.tgz#8db9a80aa1a097bb2262572686734baed9b1657c" + resolved "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz" integrity sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA== dependencies: "@jest/console" "^29.7.0" @@ -1582,7 +1543,7 @@ "@jest/test-sequencer@^29.7.0": version "29.7.0" - resolved 
"https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz#6cef977ce1d39834a3aea887a1726628a6f072ce" + resolved "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz" integrity sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw== dependencies: "@jest/test-result" "^29.7.0" @@ -1592,7 +1553,7 @@ "@jest/transform@^29.7.0": version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-29.7.0.tgz#df2dd9c346c7d7768b8a06639994640c642e284c" + resolved "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz" integrity sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw== dependencies: "@babel/core" "^7.11.6" @@ -1611,21 +1572,9 @@ slash "^3.0.0" write-file-atomic "^4.0.2" -"@jest/types@^29.5.0": - version "29.5.0" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.5.0.tgz#f59ef9b031ced83047c67032700d8c807d6e1593" - integrity sha512-qbu7kN6czmVRc3xWFQcAN03RAUamgppVUdXrvl1Wr3jlNF93o9mJbGcDWrwGB6ht44u7efB1qCFgVQmca24Uog== - dependencies: - "@jest/schemas" "^29.4.3" - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^17.0.8" - chalk "^4.0.0" - "@jest/types@^29.6.3": version "29.6.3" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59" + resolved "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz" integrity sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw== dependencies: "@jest/schemas" "^29.6.3" @@ -1635,72 +1584,59 @@ "@types/yargs" "^17.0.8" chalk "^4.0.0" -"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": - version "0.3.3" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" - integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.5" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz" + integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg== dependencies: - "@jridgewell/set-array" "^1.0.1" + "@jridgewell/set-array" "^1.2.1" "@jridgewell/sourcemap-codec" "^1.4.10" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/resolve-uri@3.1.0": - version "3.1.0" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" - integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + "@jridgewell/trace-mapping" "^0.3.24" "@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@^3.1.0": - version "3.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" - integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== - -"@jridgewell/set-array@^1.0.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" - integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + version "3.1.2" + resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz" + integrity 
sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== -"@jridgewell/sourcemap-codec@1.4.14": - version "1.4.14" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" - integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== "@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": - version "1.4.15" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" - integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== + version "1.5.0" + resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== "@jridgewell/trace-mapping@0.3.9": version "0.3.9" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz" integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== dependencies: "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" -"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.18" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" - integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== - dependencies: - "@jridgewell/resolve-uri" "3.1.0" - "@jridgewell/sourcemap-codec" "1.4.14" - -"@jridgewell/trace-mapping@^0.3.18": - version "0.3.20" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz#72e45707cf240fa6b081d0366f8265b0cd10197f" - integrity sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q== +"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== dependencies: "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" +"@js-sdsl/ordered-map@^4.4.2": + version "4.4.2" + resolved "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz" + integrity sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw== + "@mswjs/cookies@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@mswjs/cookies/-/cookies-1.1.0.tgz#1528eb43630caf83a1d75d5332b30e75e9bb1b5b" + resolved "https://registry.npmjs.org/@mswjs/cookies/-/cookies-1.1.0.tgz" integrity sha512-0ZcCVQxifZmhwNBoQIrystCb+2sWBY2Zw8lpfJBPCHGCA/HWqehITeCRVIv4VMy8MPlaHo2w2pTHFV2pFfqKPw== "@mswjs/interceptors@^0.25.13": version "0.25.13" - 
resolved "https://registry.yarnpkg.com/@mswjs/interceptors/-/interceptors-0.25.13.tgz#c7f8b845b5fdbd8f0f71fbbde06d8a40a9e81b35" + resolved "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.25.13.tgz" integrity sha512-xfjR81WwXPHwhDbqJRHlxYmboJuiSaIKpP4I5TJVFl/EmByOU13jOBT9hmEnxcjR3jvFYoqoNKt7MM9uqerj9A== dependencies: "@open-draft/deferred-promise" "^2.2.0" @@ -1712,7 +1648,7 @@ "@nodelib/fs.scandir@2.1.5": version "2.1.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== dependencies: "@nodelib/fs.stat" "2.0.5" @@ -1720,12 +1656,12 @@ "@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": version "2.0.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== "@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8": version "1.2.8" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== dependencies: "@nodelib/fs.scandir" "2.1.5" @@ -1733,7 +1669,7 @@ "@octokit/app@^14.0.2": version "14.0.2" - resolved "https://registry.yarnpkg.com/@octokit/app/-/app-14.0.2.tgz#b47c52020221351fb58640f113eb38b2ad3998fe" + resolved "https://registry.npmjs.org/@octokit/app/-/app-14.0.2.tgz" integrity sha512-NCSCktSx+XmjuSUVn2dLfqQ9WIYePGP95SDJs4I9cn/0ZkeXcPkaoCLl64Us3dRKL2ozC7hArwze5Eu+/qt1tg== dependencies: "@octokit/auth-app" "^6.0.0" @@ -1745,63 +1681,63 @@ "@octokit/webhooks" "^12.0.4" "@octokit/auth-app@^6.0.0": - version "6.0.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-app/-/auth-app-6.0.1.tgz#7137b1af124189a979de6053da5d4c8cdb1fa4e9" - integrity sha512-tjCD4nzQNZgmLH62+PSnTF6eGerisFgV4v6euhqJik6yWV96e1ZiiGj+NXIqbgnpjLmtnBqVUrNyGKu3DoGEGA== - dependencies: - "@octokit/auth-oauth-app" "^7.0.0" - "@octokit/auth-oauth-user" "^4.0.0" - "@octokit/request" "^8.0.2" - "@octokit/request-error" "^5.0.0" - "@octokit/types" "^12.0.0" + version "6.1.3" + resolved "https://registry.npmjs.org/@octokit/auth-app/-/auth-app-6.1.3.tgz" + integrity sha512-dcaiteA6Y/beAlDLZOPNReN3FGHu+pARD6OHfh3T9f3EO09++ec+5wt3KtGGSSs2Mp5tI8fQwdMOEnrzBLfgUA== + dependencies: + "@octokit/auth-oauth-app" "^7.1.0" + "@octokit/auth-oauth-user" "^4.1.0" + "@octokit/request" "^8.3.1" + "@octokit/request-error" "^5.1.0" + "@octokit/types" "^13.1.0" deprecation "^2.3.1" - lru-cache "^10.0.0" - universal-github-app-jwt "^1.1.1" + lru-cache "npm:@wolfy1339/lru-cache@^11.0.2-patch.1" + universal-github-app-jwt "^1.1.2" universal-user-agent "^6.0.0" -"@octokit/auth-oauth-app@^7.0.0": - version "7.0.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-app/-/auth-oauth-app-7.0.1.tgz#30fd8fcb4608ca52c29c265a3fc7032897796c8e" - integrity sha512-RE0KK0DCjCHXHlQBoubwlLijXEKfhMhKm9gO56xYvFmP1QTMb+vvwRPmQLLx0V+5AvV9N9I3lr1WyTzwL3rMDg== +"@octokit/auth-oauth-app@^7.0.0", "@octokit/auth-oauth-app@^7.1.0": + version "7.1.0" + resolved 
"https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-7.1.0.tgz" + integrity sha512-w+SyJN/b0l/HEb4EOPRudo7uUOSW51jcK1jwLa+4r7PA8FPFpoxEnHBHMITqCsc/3Vo2qqFjgQfz/xUUvsSQnA== dependencies: - "@octokit/auth-oauth-device" "^6.0.0" - "@octokit/auth-oauth-user" "^4.0.0" - "@octokit/request" "^8.0.2" - "@octokit/types" "^12.0.0" + "@octokit/auth-oauth-device" "^6.1.0" + "@octokit/auth-oauth-user" "^4.1.0" + "@octokit/request" "^8.3.1" + "@octokit/types" "^13.0.0" "@types/btoa-lite" "^1.0.0" btoa-lite "^1.0.0" universal-user-agent "^6.0.0" -"@octokit/auth-oauth-device@^6.0.0": - version "6.0.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-device/-/auth-oauth-device-6.0.1.tgz#38e5f7f8997c5e8b774f283463ecf4a7e42d7cee" - integrity sha512-yxU0rkL65QkjbqQedgVx3gmW7YM5fF+r5uaSj9tM/cQGVqloXcqP2xK90eTyYvl29arFVCW8Vz4H/t47mL0ELw== +"@octokit/auth-oauth-device@^6.1.0": + version "6.1.0" + resolved "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-6.1.0.tgz" + integrity sha512-FNQ7cb8kASufd6Ej4gnJ3f1QB5vJitkoV1O0/g6e6lUsQ7+VsSNRHRmFScN2tV4IgKA12frrr/cegUs0t+0/Lw== dependencies: - "@octokit/oauth-methods" "^4.0.0" - "@octokit/request" "^8.0.0" - "@octokit/types" "^12.0.0" + "@octokit/oauth-methods" "^4.1.0" + "@octokit/request" "^8.3.1" + "@octokit/types" "^13.0.0" universal-user-agent "^6.0.0" -"@octokit/auth-oauth-user@^4.0.0": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-user/-/auth-oauth-user-4.0.1.tgz#c8267883935c83f78318c726ff91d7e98de05517" - integrity sha512-N94wWW09d0hleCnrO5wt5MxekatqEJ4zf+1vSe8MKMrhZ7gAXKFOKrDEZW2INltvBWJCyDUELgGRv8gfErH1Iw== +"@octokit/auth-oauth-user@^4.0.0", "@octokit/auth-oauth-user@^4.1.0": + version "4.1.0" + resolved "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-4.1.0.tgz" + integrity sha512-FrEp8mtFuS/BrJyjpur+4GARteUCrPeR/tZJzD8YourzoVhRics7u7we/aDcKv+yywRNwNi/P4fRi631rG/OyQ== dependencies: - "@octokit/auth-oauth-device" "^6.0.0" - "@octokit/oauth-methods" "^4.0.0" - "@octokit/request" "^8.0.2" - "@octokit/types" "^12.0.0" + "@octokit/auth-oauth-device" "^6.1.0" + "@octokit/oauth-methods" "^4.1.0" + "@octokit/request" "^8.3.1" + "@octokit/types" "^13.0.0" btoa-lite "^1.0.0" universal-user-agent "^6.0.0" "@octokit/auth-token@^4.0.0": version "4.0.0" - resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-4.0.0.tgz#40d203ea827b9f17f42a29c6afb93b7745ef80c7" + resolved "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz" integrity sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA== "@octokit/auth-unauthenticated@^5.0.0": version "5.0.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-unauthenticated/-/auth-unauthenticated-5.0.1.tgz#d8032211728333068b2e07b53997c29e59a03507" + resolved "https://registry.npmjs.org/@octokit/auth-unauthenticated/-/auth-unauthenticated-5.0.1.tgz" integrity sha512-oxeWzmBFxWd+XolxKTc4zr+h3mt+yofn4r7OfoIkR/Cj/o70eEGmPsFbueyJE2iBAGpjgTnEOKM3pnuEGVmiqg== dependencies: "@octokit/request-error" "^5.0.0" @@ -1809,7 +1745,7 @@ "@octokit/core@^5.0.0", "@octokit/core@^5.0.1": version "5.0.2" - resolved "https://registry.yarnpkg.com/@octokit/core/-/core-5.0.2.tgz#ae7c5d61fdd98ba348a27c3cc510879a130b1234" + resolved "https://registry.npmjs.org/@octokit/core/-/core-5.0.2.tgz" integrity sha512-cZUy1gUvd4vttMic7C0lwPed8IYXWYp8kHIMatyhY8t8n3Cpw2ILczkV5pGMPqef7v0bLo0pOHrEHarsau2Ydg== dependencies: "@octokit/auth-token" "^4.0.0" @@ -1820,27 +1756,27 @@ 
before-after-hook "^2.2.0" universal-user-agent "^6.0.0" -"@octokit/endpoint@^9.0.0": - version "9.0.4" - resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-9.0.4.tgz#8afda5ad1ffc3073d08f2b450964c610b821d1ea" - integrity sha512-DWPLtr1Kz3tv8L0UvXTDP1fNwM0S+z6EJpRcvH66orY6Eld4XBMCSYsaWp4xIm61jTWxK68BrR7ibO+vSDnZqw== +"@octokit/endpoint@^9.0.0", "@octokit/endpoint@^9.0.1": + version "9.0.5" + resolved "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.5.tgz" + integrity sha512-ekqR4/+PCLkEBF6qgj8WqJfvDq65RH85OAgrtnVp1mSxaXF03u2xW/hUdweGS5654IlC0wkNYC18Z50tSYTAFw== dependencies: - "@octokit/types" "^12.0.0" + "@octokit/types" "^13.1.0" universal-user-agent "^6.0.0" "@octokit/graphql@^7.0.0": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-7.0.2.tgz#3df14b9968192f9060d94ed9e3aa9780a76e7f99" - integrity sha512-OJ2iGMtj5Tg3s6RaXH22cJcxXRi7Y3EBqbHTBRq+PQAqfaS8f/236fUrWhfSn8P4jovyzqucxme7/vWSSZBX2Q== + version "7.1.0" + resolved "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.0.tgz" + integrity sha512-r+oZUH7aMFui1ypZnAvZmn0KSqAUgE1/tUXIWaqUCa1758ts/Jio84GZuzsvUkme98kv0WFY8//n0J1Z+vsIsQ== dependencies: - "@octokit/request" "^8.0.1" - "@octokit/types" "^12.0.0" + "@octokit/request" "^8.3.0" + "@octokit/types" "^13.0.0" universal-user-agent "^6.0.0" "@octokit/oauth-app@^6.0.0": - version "6.0.0" - resolved "https://registry.yarnpkg.com/@octokit/oauth-app/-/oauth-app-6.0.0.tgz#a5c3b7794df4280c6aadbadd843119059d70a2c4" - integrity sha512-bNMkS+vJ6oz2hCyraT9ZfTpAQ8dZNqJJQVNaKjPLx4ue5RZiFdU1YWXguOPR8AaSHS+lKe+lR3abn2siGd+zow== + version "6.1.0" + resolved "https://registry.npmjs.org/@octokit/oauth-app/-/oauth-app-6.1.0.tgz" + integrity sha512-nIn/8eUJ/BKUVzxUXd5vpzl1rwaVxMyYbQkNZjHrF7Vk/yu98/YDF/N2KeWO7uZ0g3b5EyiFXFkZI8rJ+DH1/g== dependencies: "@octokit/auth-oauth-app" "^7.0.0" "@octokit/auth-oauth-user" "^4.0.0" @@ -1853,47 +1789,52 @@ "@octokit/oauth-authorization-url@^6.0.2": version "6.0.2" - resolved "https://registry.yarnpkg.com/@octokit/oauth-authorization-url/-/oauth-authorization-url-6.0.2.tgz#cc82ca29cc5e339c9921672f39f2b3f5c8eb6ef2" + resolved "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-6.0.2.tgz" integrity sha512-CdoJukjXXxqLNK4y/VOiVzQVjibqoj/xHgInekviUJV73y/BSIcwvJ/4aNHPBPKcPWFnd4/lO9uqRV65jXhcLA== -"@octokit/oauth-methods@^4.0.0": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@octokit/oauth-methods/-/oauth-methods-4.0.1.tgz#90d22c662387056307778d7e5c4763ff559636c4" - integrity sha512-1NdTGCoBHyD6J0n2WGXg9+yDLZrRNZ0moTEex/LSPr49m530WNKcCfXDghofYptr3st3eTii+EHoG5k/o+vbtw== +"@octokit/oauth-methods@^4.0.0", "@octokit/oauth-methods@^4.1.0": + version "4.1.0" + resolved "https://registry.npmjs.org/@octokit/oauth-methods/-/oauth-methods-4.1.0.tgz" + integrity sha512-4tuKnCRecJ6CG6gr0XcEXdZtkTDbfbnD5oaHBmLERTjTMZNi2CbfEHZxPU41xXLDG4DfKf+sonu00zvKI9NSbw== dependencies: "@octokit/oauth-authorization-url" "^6.0.2" - "@octokit/request" "^8.0.2" - "@octokit/request-error" "^5.0.0" - "@octokit/types" "^12.0.0" + "@octokit/request" "^8.3.1" + "@octokit/request-error" "^5.1.0" + "@octokit/types" "^13.0.0" btoa-lite "^1.0.0" "@octokit/openapi-types@^19.1.0": version "19.1.0" - resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-19.1.0.tgz#75ec7e64743870fc73e1ab4bc6ec252ecdd624dc" + resolved "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-19.1.0.tgz" integrity 
sha512-6G+ywGClliGQwRsjvqVYpklIfa7oRPA0vyhPQG/1Feh+B+wU0vGH1JiJ5T25d3g1JZYBHzR2qefLi9x8Gt+cpw== +"@octokit/openapi-types@^22.2.0": + version "22.2.0" + resolved "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz" + integrity sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg== + "@octokit/plugin-paginate-graphql@^4.0.0": version "4.0.0" - resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-4.0.0.tgz#b26024fa454039c18b948f13bf754ff86b89e8b9" + resolved "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-4.0.0.tgz" integrity sha512-7HcYW5tP7/Z6AETAPU14gp5H5KmCPT3hmJrS/5tO7HIgbwenYmgw4OY9Ma54FDySuxMwD+wsJlxtuGWwuZuItA== "@octokit/plugin-paginate-rest@^9.0.0": version "9.1.5" - resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.1.5.tgz#1705bcef4dcde1f4015ee58a63dc61b68648f480" + resolved "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.1.5.tgz" integrity sha512-WKTQXxK+bu49qzwv4qKbMMRXej1DU2gq017euWyKVudA6MldaSSQuxtz+vGbhxV4CjxpUxjZu6rM2wfc1FiWVg== dependencies: "@octokit/types" "^12.4.0" "@octokit/plugin-rest-endpoint-methods@^10.0.0": version "10.2.0" - resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.2.0.tgz#eeaa4de97a2ae26404dea30ce3e17b11928e027c" + resolved "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.2.0.tgz" integrity sha512-ePbgBMYtGoRNXDyKGvr9cyHjQ163PbwD0y1MkDJCpkO2YH4OeXX40c4wYHKikHGZcpGPbcRLuy0unPUuafco8Q== dependencies: "@octokit/types" "^12.3.0" "@octokit/plugin-retry@^6.0.0": version "6.0.1" - resolved "https://registry.yarnpkg.com/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz#3257404f7cc418e1c1f13a7f2012c1db848b7693" + resolved "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz" integrity sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog== dependencies: "@octokit/request-error" "^5.0.0" @@ -1902,7 +1843,7 @@ "@octokit/plugin-throttling@^8.0.0": version "8.1.3" - resolved "https://registry.yarnpkg.com/@octokit/plugin-throttling/-/plugin-throttling-8.1.3.tgz#7fb0e001c0cb9383c6be07740b8ec326ed990f6b" + resolved "https://registry.npmjs.org/@octokit/plugin-throttling/-/plugin-throttling-8.1.3.tgz" integrity sha512-pfyqaqpc0EXh5Cn4HX9lWYsZ4gGbjnSmUILeu4u2gnuM50K/wIk9s1Pxt3lVeVwekmITgN/nJdoh43Ka+vye8A== dependencies: "@octokit/types" "^12.2.0" @@ -1910,16 +1851,25 @@ "@octokit/request-error@^5.0.0": version "5.0.1" - resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-5.0.1.tgz#277e3ce3b540b41525e07ba24c5ef5e868a72db9" + resolved "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.0.1.tgz" integrity sha512-X7pnyTMV7MgtGmiXBwmO6M5kIPrntOXdyKZLigNfQWSEQzVxR4a4vo49vJjTWX70mPndj8KhfT4Dx+2Ng3vnBQ== dependencies: "@octokit/types" "^12.0.0" deprecation "^2.0.0" once "^1.4.0" -"@octokit/request@^8.0.0", "@octokit/request@^8.0.1", "@octokit/request@^8.0.2": +"@octokit/request-error@^5.1.0": + version "5.1.0" + resolved "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz" + integrity sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q== + dependencies: + "@octokit/types" "^13.1.0" + deprecation "^2.0.0" + once "^1.4.0" + +"@octokit/request@^8.0.2": version "8.1.6" - resolved 
"https://registry.yarnpkg.com/@octokit/request/-/request-8.1.6.tgz#a76a859c30421737a3918b40973c2ff369009571" + resolved "https://registry.npmjs.org/@octokit/request/-/request-8.1.6.tgz" integrity sha512-YhPaGml3ncZC1NfXpP3WZ7iliL1ap6tLkAp6MvbK2fTTPytzVUyUesBBogcdMm86uRYO5rHaM1xIWxigWZ17MQ== dependencies: "@octokit/endpoint" "^9.0.0" @@ -1927,41 +1877,58 @@ "@octokit/types" "^12.0.0" universal-user-agent "^6.0.0" +"@octokit/request@^8.3.0", "@octokit/request@^8.3.1": + version "8.4.0" + resolved "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz" + integrity sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw== + dependencies: + "@octokit/endpoint" "^9.0.1" + "@octokit/request-error" "^5.1.0" + "@octokit/types" "^13.1.0" + universal-user-agent "^6.0.0" + "@octokit/types@^12.0.0", "@octokit/types@^12.2.0", "@octokit/types@^12.3.0", "@octokit/types@^12.4.0": version "12.4.0" - resolved "https://registry.yarnpkg.com/@octokit/types/-/types-12.4.0.tgz#8f97b601e91ce6b9776ed8152217e77a71be7aac" + resolved "https://registry.npmjs.org/@octokit/types/-/types-12.4.0.tgz" integrity sha512-FLWs/AvZllw/AGVs+nJ+ELCDZZJk+kY0zMen118xhL2zD0s1etIUHm1odgjP7epxYU1ln7SZxEUWYop5bhsdgQ== dependencies: "@octokit/openapi-types" "^19.1.0" -"@octokit/webhooks-methods@^4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@octokit/webhooks-methods/-/webhooks-methods-4.0.0.tgz#d1697930ba3d8e6b6d0f8a2c996bb440d2e1df1b" - integrity sha512-M8mwmTXp+VeolOS/kfRvsDdW+IO0qJ8kYodM/sAysk093q6ApgmBXwK1ZlUvAwXVrp/YVHp6aArj4auAxUAOFw== +"@octokit/types@^13.0.0", "@octokit/types@^13.1.0": + version "13.6.1" + resolved "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz" + integrity sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g== + dependencies: + "@octokit/openapi-types" "^22.2.0" -"@octokit/webhooks-types@7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@octokit/webhooks-types/-/webhooks-types-7.1.0.tgz#d533dea253416e02dd6c2bfab25e533295bd5d3f" - integrity sha512-y92CpG4kFFtBBjni8LHoV12IegJ+KFxLgKRengrVjKmGE5XMeCuGvlfRe75lTRrgXaG6XIWJlFpIDTlkoJsU8w== +"@octokit/webhooks-methods@^4.1.0": + version "4.1.0" + resolved "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-4.1.0.tgz" + integrity sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ== + +"@octokit/webhooks-types@7.6.1": + version "7.6.1" + resolved "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.6.1.tgz" + integrity sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw== "@octokit/webhooks@^12.0.4": - version "12.0.10" - resolved "https://registry.yarnpkg.com/@octokit/webhooks/-/webhooks-12.0.10.tgz#3dcd3424ae4ff29b62b8fc8408b08c17b8178ece" - integrity sha512-Q8d26l7gZ3L1SSr25NFbbP0B431sovU5r0tIqcvy8Z4PrD1LBv0cJEjvDLOieouzPSTzSzufzRIeXD7S+zAESA== + version "12.3.1" + resolved "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-12.3.1.tgz" + integrity sha512-BVwtWE3rRXB9IugmQTfKspqjNa8q+ab73ddkV9k1Zok3XbuOxJUi4lTYk5zBZDhfWb/Y2H+RO9Iggm25gsqeow== dependencies: "@octokit/request-error" "^5.0.0" - "@octokit/webhooks-methods" "^4.0.0" - "@octokit/webhooks-types" "7.1.0" + "@octokit/webhooks-methods" "^4.1.0" + "@octokit/webhooks-types" "7.6.1" aggregate-error "^3.1.0" "@open-draft/deferred-promise@^2.2.0": version "2.2.0" - resolved 
"https://registry.yarnpkg.com/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz#4a822d10f6f0e316be4d67b4d4f8c9a124b073bd" + resolved "https://registry.npmjs.org/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz" integrity sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA== "@open-draft/logger@^0.3.0": version "0.3.0" - resolved "https://registry.yarnpkg.com/@open-draft/logger/-/logger-0.3.0.tgz#2b3ab1242b360aa0adb28b85f5d7da1c133a0954" + resolved "https://registry.npmjs.org/@open-draft/logger/-/logger-0.3.0.tgz" integrity sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ== dependencies: is-node-process "^1.2.0" @@ -1969,72 +1936,120 @@ "@open-draft/until@^2.0.0", "@open-draft/until@^2.1.0": version "2.1.0" - resolved "https://registry.yarnpkg.com/@open-draft/until/-/until-2.1.0.tgz#0acf32f470af2ceaf47f095cdecd40d68666efda" + resolved "https://registry.npmjs.org/@open-draft/until/-/until-2.1.0.tgz" integrity sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg== -"@opentelemetry/api@^1.0.1": - version "1.4.1" - resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.4.1.tgz#ff22eb2e5d476fbc2450a196e40dd243cc20c28f" - integrity sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA== +"@opentelemetry/api@^1.3.0": + version "1.9.0" + resolved "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz" + integrity sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg== -"@sinclair/typebox@^0.25.16": - version "0.25.24" - resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.25.24.tgz#8c7688559979f7079aacaf31aa881c3aa410b718" - integrity sha512-XJfwUVUKDHF5ugKwIcxEgc9k8b7HbznCp6eUfWgu710hMPNIO4aw4/zB5RogDQz8nd6gyCDpU9O/m6qYEWY6yQ== +"@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": + version "1.1.2" + resolved "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz" + integrity sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ== + +"@protobufjs/base64@^1.1.2": + version "1.1.2" + resolved "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz" + integrity sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg== + +"@protobufjs/codegen@^2.0.4": + version "2.0.4" + resolved "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz" + integrity sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg== + +"@protobufjs/eventemitter@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz" + integrity sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q== + +"@protobufjs/fetch@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz" + integrity sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ== + dependencies: + "@protobufjs/aspromise" "^1.1.1" + "@protobufjs/inquire" "^1.1.0" + +"@protobufjs/float@^1.0.2": + version "1.0.2" + resolved "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz" + integrity sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ== + +"@protobufjs/inquire@^1.1.0": + version "1.1.0" + resolved 
"https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz" + integrity sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q== + +"@protobufjs/path@^1.1.2": + version "1.1.2" + resolved "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz" + integrity sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA== + +"@protobufjs/pool@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz" + integrity sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw== + +"@protobufjs/utf8@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" + integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== "@sinclair/typebox@^0.27.8": version "0.27.8" - resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" + resolved "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz" integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== "@sinonjs/commons@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.0.tgz#beb434fe875d965265e04722ccfc21df7f755d72" - integrity sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA== + version "3.0.1" + resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz" + integrity sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ== dependencies: type-detect "4.0.8" "@sinonjs/fake-timers@^10.0.2": - version "10.2.0" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.2.0.tgz#b3e322a34c5f26e3184e7f6115695f299c1b1194" - integrity sha512-OPwQlEdg40HAj5KNF8WW6q2KG4Z+cBCZb3m4ninfTZKaBmbIJodviQsDBoYMPHkOyJJMHnOJo5j2+LKDOhOACg== + version "10.3.0" + resolved "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz" + integrity sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== dependencies: "@sinonjs/commons" "^3.0.0" "@smithy/abort-controller@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-1.1.0.tgz#2da0d73c504b93ca8bb83bdc8d6b8208d73f418b" + resolved "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-1.1.0.tgz" integrity sha512-5imgGUlZL4dW4YWdMYAKLmal9ny/tlenM81QZY7xYyb76z9Z/QOg7oM5Ak9HQl8QfFTlGVWwcMXl+54jroRgEQ== dependencies: "@smithy/types" "^1.2.0" tslib "^2.5.0" -"@smithy/abort-controller@^2.0.15": - version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-2.0.15.tgz#fcec9193da8b86eef1eedc3e71139a99c061db32" - integrity sha512-JkS36PIS3/UCbq/MaozzV7jECeL+BTt4R75bwY8i+4RASys4xOyUS1HsRyUNSqUXFP4QyCz5aNnh3ltuaxv+pw== +"@smithy/abort-controller@^2.0.15", "@smithy/abort-controller@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-2.2.0.tgz" + integrity sha512-wRlta7GuLWpTqtFfGo+nZyOO1vEvewdNR1R4rTxpC8XU6vG/NDyrFBhwLZsqg1NUoR1noVaXJPC/7ZK47QCySw== dependencies: - "@smithy/types" "^2.7.0" - tslib "^2.5.0" + "@smithy/types" "^2.12.0" + tslib "^2.6.2" "@smithy/chunked-blob-reader-native@^2.0.1": - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-2.0.1.tgz#0599eaed8c2cd15c7ab43a1838cef1258ff27133" - integrity sha512-N2oCZRglhWKm7iMBu7S6wDzXirjAofi7tAd26cxmgibRYOBS4D3hGfmkwCpHdASZzwZDD8rluh0Rcqw1JeZDRw== + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-2.2.0.tgz" + integrity sha512-VNB5+1oCgX3Fzs072yuRsUoC2N4Zg/LJ11DTxX3+Qu+Paa6AmbIF0E9sc2wthz9Psrk/zcOlTCyuposlIhPjZQ== dependencies: - "@smithy/util-base64" "^2.0.1" - tslib "^2.5.0" + "@smithy/util-base64" "^2.3.0" + tslib "^2.6.2" "@smithy/chunked-blob-reader@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/chunked-blob-reader/-/chunked-blob-reader-2.0.0.tgz#c44fe2c780eaf77f9e5381d982ac99a880cce51b" - integrity sha512-k+J4GHJsMSAIQPChGBrjEmGS+WbPonCXesoqP9fynIqjn7rdOThdH8FAeCmokP9mxTYKQAKoHCLPzNlm6gh7Wg== + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/chunked-blob-reader/-/chunked-blob-reader-2.2.0.tgz" + integrity sha512-3GJNvRwXBGdkDZZOGiziVYzDpn4j6zfyULHMDKAGIUo72yHALpE9CbhfQp/XcLNVoc1byfMpn6uW5H2BqPjgaQ== dependencies: - tslib "^2.5.0" + tslib "^2.6.2" "@smithy/config-resolver@^2.0.21": version "2.0.21" - resolved "https://registry.yarnpkg.com/@smithy/config-resolver/-/config-resolver-2.0.21.tgz#97cb1c71f3c8c453fb01169545f98414b3414d7f" + resolved "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-2.0.21.tgz" integrity sha512-rlLIGT+BeqjnA6C2FWumPRJS1UW07iU5ZxDHtFuyam4W65gIaOFMjkB90ofKCIh+0mLVQrQFrl/VLtQT/6FWTA== dependencies: "@smithy/node-config-provider" "^2.1.8" @@ -2044,32 +2059,33 @@ tslib "^2.5.0" "@smithy/core@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/core/-/core-1.1.0.tgz#80e419842cfcaf93040b2cc546f1d12731555479" - integrity sha512-k1zaT5S4K0bG67Q5TmPZ6PdWNQBTMQErChuDvTi+NTx21kKDt+/4YRidsK6nDbHizN6fn1bafUxrougZdKrpxA== - dependencies: - "@smithy/middleware-endpoint" "^2.2.3" - "@smithy/middleware-retry" "^2.0.24" - "@smithy/middleware-serde" "^2.0.15" - "@smithy/protocol-http" "^3.0.11" - "@smithy/smithy-client" "^2.1.18" - "@smithy/types" "^2.7.0" - tslib "^2.5.0" + version "1.4.2" + resolved "https://registry.npmjs.org/@smithy/core/-/core-1.4.2.tgz" + integrity sha512-2fek3I0KZHWJlRLvRTqxTEri+qV0GRHrJIoLFuBMZB4EMg4WgeBGfF0X6abnrNYpq55KJ6R4D6x4f0vLnhzinA== + dependencies: + "@smithy/middleware-endpoint" "^2.5.1" + "@smithy/middleware-retry" "^2.3.1" + "@smithy/middleware-serde" "^2.3.0" + "@smithy/protocol-http" "^3.3.0" + "@smithy/smithy-client" "^2.5.1" + "@smithy/types" "^2.12.0" + "@smithy/util-middleware" "^2.2.0" + tslib "^2.6.2" "@smithy/credential-provider-imds@^2.0.0", "@smithy/credential-provider-imds@^2.1.4": - version "2.1.4" - resolved "https://registry.yarnpkg.com/@smithy/credential-provider-imds/-/credential-provider-imds-2.1.4.tgz#126adf69eac333f23f8683edbfabdc2b3b2deb15" - integrity sha512-cwPJN1fa1YOQzhBlTXRavABEYRRchci1X79QRwzaNLySnIMJfztyv1Zkst0iZPLMnpn8+CnHu3wOHS11J5Dr3A== + version "2.3.0" + resolved "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-2.3.0.tgz" + integrity sha512-BWB9mIukO1wjEOo1Ojgl6LrG4avcaC7T/ZP6ptmAaW4xluhSIPZhY+/PI5YKzlk+jsm+4sQZB45Bt1OfMeQa3w== dependencies: - "@smithy/node-config-provider" "^2.1.8" - "@smithy/property-provider" "^2.0.16" - "@smithy/types" "^2.7.0" - "@smithy/url-parser" "^2.0.15" - tslib "^2.5.0" + "@smithy/node-config-provider" "^2.3.0" + "@smithy/property-provider" "^2.2.0" + "@smithy/types" "^2.12.0" + 
"@smithy/url-parser" "^2.2.0" + tslib "^2.6.2" "@smithy/eventstream-codec@^2.0.15": version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-2.0.15.tgz#733e638fd38e7e264bc0429dbda139bab950bd25" + resolved "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-2.0.15.tgz" integrity sha512-crjvz3j1gGPwA0us6cwS7+5gAn35CTmqu/oIxVbYJo2Qm/sGAye6zGJnMDk3BKhWZw5kcU1G4MxciTkuBpOZPg== dependencies: "@aws-crypto/crc32" "3.0.0" @@ -2077,9 +2093,19 @@ "@smithy/util-hex-encoding" "^2.0.0" tslib "^2.5.0" +"@smithy/eventstream-codec@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-2.2.0.tgz" + integrity sha512-8janZoJw85nJmQZc4L8TuePp2pk1nxLgkxIR0TUjKJ5Dkj5oelB9WtiSSGXCQvNsJl0VSTvK/2ueMXxvpa9GVw== + dependencies: + "@aws-crypto/crc32" "3.0.0" + "@smithy/types" "^2.12.0" + "@smithy/util-hex-encoding" "^2.2.0" + tslib "^2.6.2" + "@smithy/eventstream-serde-browser@^2.0.15": version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-2.0.15.tgz#f62c891e6f8ad59f552a92d8aa14eb6b4541d418" + resolved "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-2.0.15.tgz" integrity sha512-WiFG5N9j3jmS5P0z5Xev6dO0c3lf7EJYC2Ncb0xDnWFvShwXNn741AF71ABr5EcZw8F4rQma0362MMjAwJeZog== dependencies: "@smithy/eventstream-serde-universal" "^2.0.15" @@ -2088,7 +2114,7 @@ "@smithy/eventstream-serde-config-resolver@^2.0.15": version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-2.0.15.tgz#50e98c59aeb31a0702bad5dfab4009a15fc8b3bf" + resolved "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-2.0.15.tgz" integrity sha512-o65d2LRjgCbWYH+VVNlWXtmsI231SO99ZTOL4UuIPa6WTjbSHWtlXvUcJG9libhEKWmEV9DIUiH2IqyPWi7ubA== dependencies: "@smithy/types" "^2.7.0" @@ -2096,7 +2122,7 @@ "@smithy/eventstream-serde-node@^2.0.15": version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-node/-/eventstream-serde-node-2.0.15.tgz#8be1bd024048adcff4ccbb723c55fc42ce582d33" + resolved "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-2.0.15.tgz" integrity sha512-9OOXiIhHq1VeOG6xdHkn2ZayfMYM3vzdUTV3zhcCnt+tMqA3BJK3XXTJFRR2BV28rtRM778DzqbBTf+hqwQPTg== dependencies: "@smithy/eventstream-serde-universal" "^2.0.15" @@ -2104,17 +2130,17 @@ tslib "^2.5.0" "@smithy/eventstream-serde-universal@^2.0.15": - version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-2.0.15.tgz#85cdff39abc630cb18b4d333913b7120651771ca" - integrity sha512-dP8AQp/pXlWBjvL0TaPBJC3rM0GoYv7O0Uim8d/7UKZ2Wo13bFI3/BhQfY/1DeiP1m23iCHFNFtOQxfQNBB8rQ== + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-2.2.0.tgz" + integrity sha512-pvoe/vvJY0mOpuF84BEtyZoYfbehiFj8KKWk1ds2AT0mTLYFVs+7sBJZmioOFdBXKd48lfrx1vumdPdmGlCLxA== dependencies: - "@smithy/eventstream-codec" "^2.0.15" - "@smithy/types" "^2.7.0" - tslib "^2.5.0" + "@smithy/eventstream-codec" "^2.2.0" + "@smithy/types" "^2.12.0" + tslib "^2.6.2" "@smithy/fetch-http-handler@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/fetch-http-handler/-/fetch-http-handler-1.1.0.tgz#933694dcc0e1ade205161237a151c1c818479676" + resolved 
"https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-1.1.0.tgz" integrity sha512-N22C9R44u5WGlcY+Wuv8EXmCAq62wWwriRAuoczMEwAIjPbvHSthyPSLqI4S7kAST1j6niWg8kwpeJ3ReAv3xg== dependencies: "@smithy/protocol-http" "^1.2.0" @@ -2125,7 +2151,7 @@ "@smithy/fetch-http-handler@^2.3.1": version "2.3.1" - resolved "https://registry.yarnpkg.com/@smithy/fetch-http-handler/-/fetch-http-handler-2.3.1.tgz#aa055db5bf4d78acec97abe6ef24283fa2c18430" + resolved "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-2.3.1.tgz" integrity sha512-6MNk16fqb8EwcYY8O8WxB3ArFkLZ2XppsSNo1h7SQcFdDDwIumiJeO6wRzm7iB68xvsOQzsdQKbdtTieS3hfSQ== dependencies: "@smithy/protocol-http" "^3.0.11" @@ -2134,9 +2160,20 @@ "@smithy/util-base64" "^2.0.1" tslib "^2.5.0" +"@smithy/fetch-http-handler@^2.5.0": + version "2.5.0" + resolved "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-2.5.0.tgz" + integrity sha512-BOWEBeppWhLn/no/JxUL/ghTfANTjT7kg3Ww2rPqTUY9R4yHPXxJ9JhMe3Z03LN3aPwiwlpDIUcVw1xDyHqEhw== + dependencies: + "@smithy/protocol-http" "^3.3.0" + "@smithy/querystring-builder" "^2.2.0" + "@smithy/types" "^2.12.0" + "@smithy/util-base64" "^2.3.0" + tslib "^2.6.2" + "@smithy/hash-blob-browser@^2.0.16": version "2.0.16" - resolved "https://registry.yarnpkg.com/@smithy/hash-blob-browser/-/hash-blob-browser-2.0.16.tgz#6cd3686e79f3c8d96a129076073bf20d06293152" + resolved "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-2.0.16.tgz" integrity sha512-cSYRi05LA7DZDwjB1HL0BP8B56eUNNeLglVH147QTXFyuXJq/7erAIiLRfsyXB8+GfFHkSS5BHbc76a7k/AYPA== dependencies: "@smithy/chunked-blob-reader" "^2.0.0" @@ -2146,7 +2183,7 @@ "@smithy/hash-node@^2.0.17": version "2.0.17" - resolved "https://registry.yarnpkg.com/@smithy/hash-node/-/hash-node-2.0.17.tgz#9ce5e3f137143e3658759d31a16e068ef94a14fc" + resolved "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-2.0.17.tgz" integrity sha512-Il6WuBcI1nD+e2DM7tTADMf01wEPGK8PAhz4D+YmDUVaoBqlA+CaH2uDJhiySifmuKBZj748IfygXty81znKhw== dependencies: "@smithy/types" "^2.7.0" @@ -2156,7 +2193,7 @@ "@smithy/hash-stream-node@^2.0.17": version "2.0.17" - resolved "https://registry.yarnpkg.com/@smithy/hash-stream-node/-/hash-stream-node-2.0.17.tgz#90375ed9c1a586118433c925a61d39b5555bf284" + resolved "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-2.0.17.tgz" integrity sha512-ey8DtnATzp1mOXgS7rqMwSmAki6iJA+jgNucKcxRkhMB1rrICfHg+rhmIF50iLPDHUhTcS5pBMOrLzzpZftvNQ== dependencies: "@smithy/types" "^2.7.0" @@ -2165,7 +2202,7 @@ "@smithy/invalid-dependency@^2.0.15": version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/invalid-dependency/-/invalid-dependency-2.0.15.tgz#7653490047bf0ab6042fb812adfbcce857aa2d06" + resolved "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-2.0.15.tgz" integrity sha512-dlEKBFFwVfzA5QroHlBS94NpgYjXhwN/bFfun+7w3rgxNvVy79SK0w05iGc7UAeC5t+D7gBxrzdnD6hreZnDVQ== dependencies: "@smithy/types" "^2.7.0" @@ -2173,21 +2210,28 @@ "@smithy/is-array-buffer@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-1.1.0.tgz#29948072da2b57575aa9898cda863932e842ab11" + resolved "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-1.1.0.tgz" integrity sha512-twpQ/n+3OWZJ7Z+xu43MJErmhB/WO/mMTnqR6PwWQShvSJ/emx5d1N59LQZk6ZpTAeuRWrc+eHhkzTp9NFjNRQ== dependencies: tslib "^2.5.0" "@smithy/is-array-buffer@^2.0.0": version "2.0.0" - resolved 
"https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz#8fa9b8040651e7ba0b2f6106e636a91354ff7d34" + resolved "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz" integrity sha512-z3PjFjMyZNI98JFRJi/U0nGoLWMSJlDjAW4QUX2WNZLas5C0CmVV6LJ01JI0k90l7FvpmixjWxPFmENSClQ7ug== dependencies: tslib "^2.5.0" +"@smithy/is-array-buffer@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz" + integrity sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA== + dependencies: + tslib "^2.6.2" + "@smithy/md5-js@^2.0.17": version "2.0.17" - resolved "https://registry.yarnpkg.com/@smithy/md5-js/-/md5-js-2.0.17.tgz#784c02da6cee539f5af0e45b1eaf9beb10ed8ad6" + resolved "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-2.0.17.tgz" integrity sha512-jmISTCnEkOnm2oCNx/rMkvBT/eQh3aA6nktevkzbmn/VYqYEuc5Z2n5sTTqsciMSO01Lvf56wG1A4twDqovYeQ== dependencies: "@smithy/types" "^2.7.0" @@ -2196,7 +2240,7 @@ "@smithy/middleware-content-length@^2.0.17": version "2.0.17" - resolved "https://registry.yarnpkg.com/@smithy/middleware-content-length/-/middleware-content-length-2.0.17.tgz#13479173a15d1cd4224e3e21071a27c66a74b653" + resolved "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-2.0.17.tgz" integrity sha512-OyadvMcKC7lFXTNBa8/foEv7jOaqshQZkjWS9coEXPRZnNnihU/Ls+8ZuJwGNCOrN2WxXZFmDWhegbnM4vak8w== dependencies: "@smithy/protocol-http" "^3.0.11" @@ -2205,7 +2249,7 @@ "@smithy/middleware-endpoint@^2.2.3": version "2.2.3" - resolved "https://registry.yarnpkg.com/@smithy/middleware-endpoint/-/middleware-endpoint-2.2.3.tgz#4069ab6e8d1b485bc0d2384b30f7b37096111ec2" + resolved "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-2.2.3.tgz" integrity sha512-nYfxuq0S/xoAjdLbyn1ixeVB6cyH9wYCMtbbOCpcCRYR5u2mMtqUtVjjPAZ/DIdlK3qe0tpB0Q76szFGNuz+kQ== dependencies: "@smithy/middleware-serde" "^2.0.15" @@ -2216,9 +2260,22 @@ "@smithy/util-middleware" "^2.0.8" tslib "^2.5.0" +"@smithy/middleware-endpoint@^2.5.1": + version "2.5.1" + resolved "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-2.5.1.tgz" + integrity sha512-1/8kFp6Fl4OsSIVTWHnNjLnTL8IqpIb/D3sTSczrKFnrE9VMNWxnrRKNvpUHOJ6zpGD5f62TPm7+17ilTJpiCQ== + dependencies: + "@smithy/middleware-serde" "^2.3.0" + "@smithy/node-config-provider" "^2.3.0" + "@smithy/shared-ini-file-loader" "^2.4.0" + "@smithy/types" "^2.12.0" + "@smithy/url-parser" "^2.2.0" + "@smithy/util-middleware" "^2.2.0" + tslib "^2.6.2" + "@smithy/middleware-retry@^2.0.24": version "2.0.24" - resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-2.0.24.tgz#556a39e7d2be32cc61862e020409d3f93e2c5be1" + resolved "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-2.0.24.tgz" integrity sha512-q2SvHTYu96N7lYrn3VSuX3vRpxXHR/Cig6MJpGWxd0BWodUQUWlKvXpWQZA+lTaFJU7tUvpKhRd4p4MU3PbeJg== dependencies: "@smithy/node-config-provider" "^2.1.8" @@ -2231,32 +2288,63 @@ tslib "^2.5.0" uuid "^8.3.2" +"@smithy/middleware-retry@^2.3.1": + version "2.3.1" + resolved "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-2.3.1.tgz" + integrity sha512-P2bGufFpFdYcWvqpyqqmalRtwFUNUA8vHjJR5iGqbfR6mp65qKOLcUd6lTr4S9Gn/enynSrSf3p3FVgVAf6bXA== + dependencies: + "@smithy/node-config-provider" "^2.3.0" + "@smithy/protocol-http" "^3.3.0" + "@smithy/service-error-classification" "^2.1.5" + "@smithy/smithy-client" "^2.5.1" + "@smithy/types" 
"^2.12.0" + "@smithy/util-middleware" "^2.2.0" + "@smithy/util-retry" "^2.2.0" + tslib "^2.6.2" + uuid "^9.0.1" + "@smithy/middleware-serde@^2.0.15": version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/middleware-serde/-/middleware-serde-2.0.15.tgz#9deac4daad1f2a60d5c4e7097658f9ae2eb0a33f" + resolved "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-2.0.15.tgz" integrity sha512-FOZRFk/zN4AT4wzGuBY+39XWe+ZnCFd0gZtyw3f9Okn2CJPixl9GyWe98TIaljeZdqWkgrzGyPre20AcW2UMHQ== dependencies: "@smithy/types" "^2.7.0" tslib "^2.5.0" +"@smithy/middleware-serde@^2.3.0": + version "2.3.0" + resolved "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-2.3.0.tgz" + integrity sha512-sIADe7ojwqTyvEQBe1nc/GXB9wdHhi9UwyX0lTyttmUWDJLP655ZYE1WngnNyXREme8I27KCaUhyhZWRXL0q7Q== + dependencies: + "@smithy/types" "^2.12.0" + tslib "^2.6.2" + "@smithy/middleware-stack@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/middleware-stack/-/middleware-stack-1.1.0.tgz#04edd33b5db48d880b9942c38459f193144fa533" + resolved "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-1.1.0.tgz" integrity sha512-XynYiIvXNea2BbLcppvpNK0zu8o2woJqgnmxqYTn4FWagH/Hr2QIk8LOsUz7BIJ4tooFhmx8urHKCdlPbbPDCA== dependencies: tslib "^2.5.0" "@smithy/middleware-stack@^2.0.9": version "2.0.9" - resolved "https://registry.yarnpkg.com/@smithy/middleware-stack/-/middleware-stack-2.0.9.tgz#60e51697c74258fac087bc739d940f524921a15f" + resolved "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-2.0.9.tgz" integrity sha512-bCB5dUtGQ5wh7QNL2ELxmDc6g7ih7jWU3Kx6MYH1h4mZbv9xL3WyhKHojRltThCB1arLPyTUFDi+x6fB/oabtA== dependencies: "@smithy/types" "^2.7.0" tslib "^2.5.0" +"@smithy/middleware-stack@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-2.2.0.tgz" + integrity sha512-Qntc3jrtwwrsAC+X8wms8zhrTr0sFXnyEGhZd9sLtsJ/6gGQKFzNB+wWbOcpJd7BR8ThNCoKt76BuQahfMvpeA== + dependencies: + "@smithy/types" "^2.12.0" + tslib "^2.6.2" + "@smithy/node-config-provider@^2.1.8": version "2.1.8" - resolved "https://registry.yarnpkg.com/@smithy/node-config-provider/-/node-config-provider-2.1.8.tgz#8cab8f1172c8cd1146e7997292786909abcae763" + resolved "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-2.1.8.tgz" integrity sha512-+w26OKakaBUGp+UG+dxYZtFb5fs3tgHg3/QrRrmUZj+rl3cIuw840vFUXX35cVPTUCQIiTqmz7CpVF7+hdINdQ== dependencies: "@smithy/property-provider" "^2.0.16" @@ -2264,9 +2352,19 @@ "@smithy/types" "^2.7.0" tslib "^2.5.0" +"@smithy/node-config-provider@^2.3.0": + version "2.3.0" + resolved "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-2.3.0.tgz" + integrity sha512-0elK5/03a1JPWMDPaS726Iw6LpQg80gFut1tNpPfxFuChEEklo2yL823V94SpTZTxmKlXFtFgsP55uh3dErnIg== + dependencies: + "@smithy/property-provider" "^2.2.0" + "@smithy/shared-ini-file-loader" "^2.4.0" + "@smithy/types" "^2.12.0" + tslib "^2.6.2" + "@smithy/node-http-handler@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-1.1.0.tgz#887cee930b520e08043c9f41e463f8d8f5dae127" + resolved "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-1.1.0.tgz" integrity sha512-d3kRriEgaIiGXLziAM8bjnaLn1fthCJeTLZIwEIpzQqe6yPX0a+yQoLCTyjb2fvdLwkMoG4p7THIIB5cj5lkbg== dependencies: "@smithy/abort-controller" "^1.1.0" @@ -2277,7 +2375,7 @@ "@smithy/node-http-handler@^2.2.1": version "2.2.1" - resolved 
"https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-2.2.1.tgz#23f6540e565edcae8c558a854fffde3d003451c0" + resolved "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-2.2.1.tgz" integrity sha512-8iAKQrC8+VFHPAT8pg4/j6hlsTQh+NKOWlctJBrYtQa4ExcxX7aSg3vdQ2XLoYwJotFUurg/NLqFCmZaPRrogw== dependencies: "@smithy/abort-controller" "^2.0.15" @@ -2286,17 +2384,28 @@ "@smithy/types" "^2.7.0" tslib "^2.5.0" -"@smithy/property-provider@^2.0.0", "@smithy/property-provider@^2.0.16": - version "2.0.16" - resolved "https://registry.yarnpkg.com/@smithy/property-provider/-/property-provider-2.0.16.tgz#0c15ea8a3e8c8e7012bf5877c79ce754f7d2c06e" - integrity sha512-28Ky0LlOqtEjwg5CdHmwwaDRHcTWfPRzkT6HrhwOSRS2RryAvuDfJrZpM+BMcrdeCyEg1mbcgIMoqTla+rdL8Q== +"@smithy/node-http-handler@^2.5.0": + version "2.5.0" + resolved "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-2.5.0.tgz" + integrity sha512-mVGyPBzkkGQsPoxQUbxlEfRjrj6FPyA3u3u2VXGr9hT8wilsoQdZdvKpMBFMB8Crfhv5dNkKHIW0Yyuc7eABqA== dependencies: - "@smithy/types" "^2.7.0" - tslib "^2.5.0" + "@smithy/abort-controller" "^2.2.0" + "@smithy/protocol-http" "^3.3.0" + "@smithy/querystring-builder" "^2.2.0" + "@smithy/types" "^2.12.0" + tslib "^2.6.2" + +"@smithy/property-provider@^2.0.0", "@smithy/property-provider@^2.0.16", "@smithy/property-provider@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-2.2.0.tgz" + integrity sha512-+xiil2lFhtTRzXkx8F053AV46QnIw6e7MV8od5Mi68E1ICOjCeCHw2XfLnDEUHnT9WGUIkwcqavXjfwuJbGlpg== + dependencies: + "@smithy/types" "^2.12.0" + tslib "^2.6.2" "@smithy/protocol-http@^1.2.0": version "1.2.0" - resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-1.2.0.tgz#a554e4dabb14508f0bc2cdef9c3710e2b294be04" + resolved "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-1.2.0.tgz" integrity sha512-GfGfruksi3nXdFok5RhgtOnWe5f6BndzYfmEXISD+5gAGdayFGpjWu5pIqIweTudMtse20bGbc+7MFZXT1Tb8Q== dependencies: "@smithy/types" "^1.2.0" @@ -2304,56 +2413,64 @@ "@smithy/protocol-http@^3.0.11": version "3.0.11" - resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-3.0.11.tgz#a9ea712fe7cc3375378ac68d9168a7b6cd0b6f65" + resolved "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-3.0.11.tgz" integrity sha512-3ziB8fHuXIRamV/akp/sqiWmNPR6X+9SB8Xxnozzj+Nq7hSpyKdFHd1FLpBkgfGFUTzzcBJQlDZPSyxzmdcx5A== dependencies: "@smithy/types" "^2.7.0" tslib "^2.5.0" +"@smithy/protocol-http@^3.3.0": + version "3.3.0" + resolved "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-3.3.0.tgz" + integrity sha512-Xy5XK1AFWW2nlY/biWZXu6/krgbaf2dg0q492D8M5qthsnU2H+UgFeZLbM76FnH7s6RO/xhQRkj+T6KBO3JzgQ== + dependencies: + "@smithy/types" "^2.12.0" + tslib "^2.6.2" + "@smithy/querystring-builder@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-1.1.0.tgz#de6306104640ade34e59be33949db6cc64aa9d7f" + resolved "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-1.1.0.tgz" integrity sha512-gDEi4LxIGLbdfjrjiY45QNbuDmpkwh9DX4xzrR2AzjjXpxwGyfSpbJaYhXARw9p17VH0h9UewnNQXNwaQyYMDA== dependencies: "@smithy/types" "^1.2.0" "@smithy/util-uri-escape" "^1.1.0" tslib "^2.5.0" -"@smithy/querystring-builder@^2.0.15": - version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-2.0.15.tgz#aa8c889bcaef274b8345be4ddabae3bfedf2cf33" - integrity 
sha512-e1q85aT6HutvouOdN+dMsN0jcdshp50PSCvxDvo6aIM57LqeXimjfONUEgfqQ4IFpYWAtVixptyIRE5frMp/2A== +"@smithy/querystring-builder@^2.0.15", "@smithy/querystring-builder@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-2.2.0.tgz" + integrity sha512-L1kSeviUWL+emq3CUVSgdogoM/D9QMFaqxL/dd0X7PCNWmPXqt+ExtrBjqT0V7HLN03Vs9SuiLrG3zy3JGnE5A== dependencies: - "@smithy/types" "^2.7.0" - "@smithy/util-uri-escape" "^2.0.0" - tslib "^2.5.0" + "@smithy/types" "^2.12.0" + "@smithy/util-uri-escape" "^2.2.0" + tslib "^2.6.2" -"@smithy/querystring-parser@^2.0.15": - version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/querystring-parser/-/querystring-parser-2.0.15.tgz#46c8806a145f46636e4aee2a5d79e7ba68161a4c" - integrity sha512-jbBvoK3cc81Cj1c1TH1qMYxNQKHrYQ2DoTntN9FBbtUWcGhc+T4FP6kCKYwRLXyU4AajwGIZstvNAmIEgUUNTQ== +"@smithy/querystring-parser@^2.0.15", "@smithy/querystring-parser@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-2.2.0.tgz" + integrity sha512-BvHCDrKfbG5Yhbpj4vsbuPV2GgcpHiAkLeIlcA1LtfpMz3jrqizP1+OguSNSj1MwBHEiN+jwNisXLGdajGDQJA== dependencies: - "@smithy/types" "^2.7.0" - tslib "^2.5.0" + "@smithy/types" "^2.12.0" + tslib "^2.6.2" -"@smithy/service-error-classification@^2.0.8": - version "2.0.8" - resolved "https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-2.0.8.tgz#c9e421312a2def84da025c5efe6de06679c5be95" - integrity sha512-jCw9+005im8tsfYvwwSc4TTvd29kXRFkH9peQBg5R/4DD03ieGm6v6Hpv9nIAh98GwgYg1KrztcINC1s4o7/hg== +"@smithy/service-error-classification@^2.0.8", "@smithy/service-error-classification@^2.1.5": + version "2.1.5" + resolved "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-2.1.5.tgz" + integrity sha512-uBDTIBBEdAQryvHdc5W8sS5YX7RQzF683XrHePVdFmAgKiMofU15FLSM0/HU03hKTnazdNRFa0YHS7+ArwoUSQ== dependencies: - "@smithy/types" "^2.7.0" + "@smithy/types" "^2.12.0" -"@smithy/shared-ini-file-loader@^2.0.6", "@smithy/shared-ini-file-loader@^2.2.7": - version "2.2.7" - resolved "https://registry.yarnpkg.com/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-2.2.7.tgz#4a3bd469703d02c3cc8e36dcba2238c06efa12cb" - integrity sha512-0Qt5CuiogIuvQIfK+be7oVHcPsayLgfLJGkPlbgdbl0lD28nUKu4p11L+UG3SAEsqc9UsazO+nErPXw7+IgDpQ== +"@smithy/shared-ini-file-loader@^2.0.6", "@smithy/shared-ini-file-loader@^2.2.7", "@smithy/shared-ini-file-loader@^2.4.0": + version "2.4.0" + resolved "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-2.4.0.tgz" + integrity sha512-WyujUJL8e1B6Z4PBfAqC/aGY1+C7T0w20Gih3yrvJSk97gpiVfB+y7c46T4Nunk+ZngLq0rOIdeVeIklk0R3OA== dependencies: - "@smithy/types" "^2.7.0" - tslib "^2.5.0" + "@smithy/types" "^2.12.0" + tslib "^2.6.2" "@smithy/signature-v4@^2.0.0": version "2.0.18" - resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-2.0.18.tgz#53b78b238edaa84cc8d61faf67d2b3c926cdd698" + resolved "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-2.0.18.tgz" integrity sha512-SJRAj9jT/l9ocm8D0GojMbnA1sp7I4JeStOQ4lEXI8A5eHE73vbjlzlqIFB7cLvIgau0oUl4cGVpF9IGCrvjlw== dependencies: "@smithy/eventstream-codec" "^2.0.15" @@ -2367,7 +2484,7 @@ "@smithy/smithy-client@^1.0.3": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/smithy-client/-/smithy-client-1.1.0.tgz#a546a41cc377c836756b6fa749fc9ae292472985" + resolved 
"https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-1.1.0.tgz" integrity sha512-j32SGgVhv2G9nBTmel9u3OXux8KG20ssxuFakJrEeDug3kqbl1qrGzVLCe+Eib402UDtA0Sp1a4NZ2SEXDBxag== dependencies: "@smithy/middleware-stack" "^1.1.0" @@ -2377,7 +2494,7 @@ "@smithy/smithy-client@^2.1.18": version "2.1.18" - resolved "https://registry.yarnpkg.com/@smithy/smithy-client/-/smithy-client-2.1.18.tgz#f8ce2c0e9614f207256ddcd992403aff40750546" + resolved "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-2.1.18.tgz" integrity sha512-7FqdbaJiVaHJDD9IfDhmzhSDbpjyx+ZsfdYuOpDJF09rl8qlIAIlZNoSaflKrQ3cEXZN2YxGPaNWGhbYimyIRQ== dependencies: "@smithy/middleware-stack" "^2.0.9" @@ -2385,32 +2502,67 @@ "@smithy/util-stream" "^2.0.23" tslib "^2.5.0" +"@smithy/smithy-client@^2.5.1": + version "2.5.1" + resolved "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-2.5.1.tgz" + integrity sha512-jrbSQrYCho0yDaaf92qWgd+7nAeap5LtHTI51KXqmpIFCceKU3K9+vIVTUH72bOJngBMqa4kyu1VJhRcSrk/CQ== + dependencies: + "@smithy/middleware-endpoint" "^2.5.1" + "@smithy/middleware-stack" "^2.2.0" + "@smithy/protocol-http" "^3.3.0" + "@smithy/types" "^2.12.0" + "@smithy/util-stream" "^2.2.0" + tslib "^2.6.2" + "@smithy/types@^1.2.0": version "1.2.0" - resolved "https://registry.yarnpkg.com/@smithy/types/-/types-1.2.0.tgz#9dc65767b0ee3d6681704fcc67665d6fc9b6a34e" + resolved "https://registry.npmjs.org/@smithy/types/-/types-1.2.0.tgz" integrity sha512-z1r00TvBqF3dh4aHhya7nz1HhvCg4TRmw51fjMrh5do3h+ngSstt/yKlNbHeb9QxJmFbmN8KEVSWgb1bRvfEoA== dependencies: tslib "^2.5.0" +"@smithy/types@^2.12.0": + version "2.12.0" + resolved "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz" + integrity sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw== + dependencies: + tslib "^2.6.2" + "@smithy/types@^2.7.0": version "2.7.0" - resolved "https://registry.yarnpkg.com/@smithy/types/-/types-2.7.0.tgz#6ed9ba5bff7c4d28c980cff967e6d8456840a4f3" + resolved "https://registry.npmjs.org/@smithy/types/-/types-2.7.0.tgz" integrity sha512-1OIFyhK+vOkMbu4aN2HZz/MomREkrAC/HqY5mlJMUJfGrPRwijJDTeiN8Rnj9zUaB8ogXAfIOtZrrgqZ4w7Wnw== dependencies: tslib "^2.5.0" +"@smithy/types@^3.7.0": + version "3.7.1" + resolved "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz" + integrity sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA== + dependencies: + tslib "^2.6.2" + "@smithy/url-parser@^2.0.15": version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/url-parser/-/url-parser-2.0.15.tgz#878d9b61f9eac8834cb611cf1a8a0e5d9a48038c" + resolved "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-2.0.15.tgz" integrity sha512-sADUncUj9rNbOTrdDGm4EXlUs0eQ9dyEo+V74PJoULY4jSQxS+9gwEgsPYyiu8PUOv16JC/MpHonOgqP/IEDZA== dependencies: "@smithy/querystring-parser" "^2.0.15" "@smithy/types" "^2.7.0" tslib "^2.5.0" +"@smithy/url-parser@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-2.2.0.tgz" + integrity sha512-hoA4zm61q1mNTpksiSWp2nEl1dt3j726HdRhiNgVJQMj7mLp7dprtF57mOB6JvEk/x9d2bsuL5hlqZbBuHQylQ== + dependencies: + "@smithy/querystring-parser" "^2.2.0" + "@smithy/types" "^2.12.0" + tslib "^2.6.2" + "@smithy/util-base64@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/util-base64/-/util-base64-1.1.0.tgz#2b1854013bfd11aefdd0c035eae789d7c4e56a1e" + resolved "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-1.1.0.tgz" integrity 
sha512-FpYmDmVbOXAxqvoVCwqehUN0zXS+lN8V7VS9O7I8MKeVHdSTsZzlwiMEvGoyTNOXWn8luF4CTDYgNHnZViR30g== dependencies: "@smithy/util-buffer-from" "^1.1.0" @@ -2418,29 +2570,38 @@ "@smithy/util-base64@^2.0.1": version "2.0.1" - resolved "https://registry.yarnpkg.com/@smithy/util-base64/-/util-base64-2.0.1.tgz#57f782dafc187eddea7c8a1ff2a7c188ed1a02c4" + resolved "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-2.0.1.tgz" integrity sha512-DlI6XFYDMsIVN+GH9JtcRp3j02JEVuWIn/QOZisVzpIAprdsxGveFed0bjbMRCqmIFe8uetn5rxzNrBtIGrPIQ== dependencies: "@smithy/util-buffer-from" "^2.0.0" tslib "^2.5.0" +"@smithy/util-base64@^2.3.0": + version "2.3.0" + resolved "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-2.3.0.tgz" + integrity sha512-s3+eVwNeJuXUwuMbusncZNViuhv2LjVJ1nMwTqSA0XAC7gjKhqqxRdJPhR8+YrkoZ9IiIbFk/yK6ACe/xlF+hw== + dependencies: + "@smithy/util-buffer-from" "^2.2.0" + "@smithy/util-utf8" "^2.3.0" + tslib "^2.6.2" + "@smithy/util-body-length-browser@^2.0.1": version "2.0.1" - resolved "https://registry.yarnpkg.com/@smithy/util-body-length-browser/-/util-body-length-browser-2.0.1.tgz#424485cc81c640d18c17c683e0e6edb57e8e2ab9" + resolved "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-2.0.1.tgz" integrity sha512-NXYp3ttgUlwkaug4bjBzJ5+yIbUbUx8VsSLuHZROQpoik+gRkIBeEG9MPVYfvPNpuXb/puqodeeUXcKFe7BLOQ== dependencies: tslib "^2.5.0" "@smithy/util-body-length-node@^2.1.0": version "2.1.0" - resolved "https://registry.yarnpkg.com/@smithy/util-body-length-node/-/util-body-length-node-2.1.0.tgz#313a5f7c5017947baf5fa018bfc22628904bbcfa" + resolved "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-2.1.0.tgz" integrity sha512-/li0/kj/y3fQ3vyzn36NTLGmUwAICb7Jbe/CsWCktW363gh1MOcpEcSO3mJ344Gv2dqz8YJCLQpb6hju/0qOWw== dependencies: tslib "^2.5.0" "@smithy/util-buffer-from@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-1.1.0.tgz#a000bd9f95c0e8d5b0edb0112f2a586daa5bed49" + resolved "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-1.1.0.tgz" integrity sha512-9m6NXE0ww+ra5HKHCHig20T+FAwxBAm7DIdwc/767uGWbRcY720ybgPacQNB96JMOI7xVr/CDa3oMzKmW4a+kw== dependencies: "@smithy/is-array-buffer" "^1.1.0" @@ -2448,22 +2609,30 @@ "@smithy/util-buffer-from@^2.0.0": version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz#7eb75d72288b6b3001bc5f75b48b711513091deb" + resolved "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz" integrity sha512-/YNnLoHsR+4W4Vf2wL5lGv0ksg8Bmk3GEGxn2vEQt52AQaPSCuaO5PM5VM7lP1K9qHRKHwrPGktqVoAHKWHxzw== dependencies: "@smithy/is-array-buffer" "^2.0.0" tslib "^2.5.0" +"@smithy/util-buffer-from@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz" + integrity sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA== + dependencies: + "@smithy/is-array-buffer" "^2.2.0" + tslib "^2.6.2" + "@smithy/util-config-provider@^2.0.0": version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-config-provider/-/util-config-provider-2.0.0.tgz#4dd6a793605559d94267312fd06d0f58784b4c38" + resolved "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-2.0.0.tgz" integrity sha512-xCQ6UapcIWKxXHEU4Mcs2s7LcFQRiU3XEluM2WcCjjBtQkUN71Tb+ydGmJFPxMUrW/GWMgQEEGipLym4XG0jZg== dependencies: tslib "^2.5.0" 
"@smithy/util-defaults-mode-browser@^2.0.22": version "2.0.22" - resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-2.0.22.tgz#8ef8c36b8c3c2f98f7a62278c3c684d659134269" + resolved "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-2.0.22.tgz" integrity sha512-qcF20IHHH96FlktvBRICDXDhLPtpVmtksHmqNGtotb9B0DYWXsC6jWXrkhrrwF7tH26nj+npVTqh9isiFV1gdA== dependencies: "@smithy/property-provider" "^2.0.16" @@ -2474,7 +2643,7 @@ "@smithy/util-defaults-mode-node@^2.0.29": version "2.0.29" - resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-2.0.29.tgz#6b210aede145a6bf4bd83d9f465948fb300ca577" + resolved "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-2.0.29.tgz" integrity sha512-+uG/15VoUh6JV2fdY9CM++vnSuMQ1VKZ6BdnkUM7R++C/vLjnlg+ToiSR1FqKZbMmKBXmsr8c/TsDWMAYvxbxQ== dependencies: "@smithy/config-resolver" "^2.0.21" @@ -2487,7 +2656,7 @@ "@smithy/util-endpoints@^1.0.7": version "1.0.7" - resolved "https://registry.yarnpkg.com/@smithy/util-endpoints/-/util-endpoints-1.0.7.tgz#5a258ac7838dea085660060b515cd2d19f19a4bc" + resolved "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-1.0.7.tgz" integrity sha512-Q2gEind3jxoLk6hdKWyESMU7LnXz8aamVwM+VeVjOYzYT1PalGlY/ETa48hv2YpV4+YV604y93YngyzzzQ4IIA== dependencies: "@smithy/node-config-provider" "^2.1.8" @@ -2496,38 +2665,62 @@ "@smithy/util-hex-encoding@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-1.1.0.tgz#b5ba919aa076a3fd5e93e368e34ae2b732fa2090" + resolved "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-1.1.0.tgz" integrity sha512-7UtIE9eH0u41zpB60Jzr0oNCQ3hMJUabMcKRUVjmyHTXiWDE4vjSqN6qlih7rCNeKGbioS7f/y2Jgym4QZcKFg== dependencies: tslib "^2.5.0" "@smithy/util-hex-encoding@^2.0.0": version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-2.0.0.tgz#0aa3515acd2b005c6d55675e377080a7c513b59e" + resolved "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-2.0.0.tgz" integrity sha512-c5xY+NUnFqG6d7HFh1IFfrm3mGl29lC+vF+geHv4ToiuJCBmIfzx6IeHLg+OgRdPFKDXIw6pvi+p3CsscaMcMA== dependencies: tslib "^2.5.0" +"@smithy/util-hex-encoding@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-2.2.0.tgz" + integrity sha512-7iKXR+/4TpLK194pVjKiasIyqMtTYJsgKgM242Y9uzt5dhHnUDvMNb+3xIhRJ9QhvqGii/5cRUt4fJn3dtXNHQ== + dependencies: + tslib "^2.6.2" + "@smithy/util-middleware@^2.0.8": version "2.0.8" - resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-2.0.8.tgz#2ec1da1190d09b69512ce0248ebd5e819e3c8a92" + resolved "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-2.0.8.tgz" integrity sha512-qkvqQjM8fRGGA8P2ydWylMhenCDP8VlkPn8kiNuFEaFz9xnUKC2irfqsBSJrfrOB9Qt6pQsI58r3zvvumhFMkw== dependencies: "@smithy/types" "^2.7.0" tslib "^2.5.0" +"@smithy/util-middleware@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-2.2.0.tgz" + integrity sha512-L1qpleXf9QD6LwLCJ5jddGkgWyuSvWBkJwWAZ6kFkdifdso+sk3L3O1HdmPvCdnCK3IS4qWyPxev01QMnfHSBw== + dependencies: + "@smithy/types" "^2.12.0" + tslib "^2.6.2" + "@smithy/util-retry@^2.0.8": version "2.0.8" - resolved "https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-2.0.8.tgz#61f8db11e4fe60975cb9fb2eada173f5024a06f3" + resolved 
"https://registry.npmjs.org/@smithy/util-retry/-/util-retry-2.0.8.tgz" integrity sha512-cQTPnVaVFMjjS6cb44WV2yXtHVyXDC5icKyIbejMarJEApYeJWpBU3LINTxHqp/tyLI+MZOUdosr2mZ3sdziNg== dependencies: "@smithy/service-error-classification" "^2.0.8" "@smithy/types" "^2.7.0" tslib "^2.5.0" +"@smithy/util-retry@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-2.2.0.tgz" + integrity sha512-q9+pAFPTfftHXRytmZ7GzLFFrEGavqapFc06XxzZFcSIGERXMerXxCitjOG1prVDR9QdjqotF40SWvbqcCpf8g== + dependencies: + "@smithy/service-error-classification" "^2.1.5" + "@smithy/types" "^2.12.0" + tslib "^2.6.2" + "@smithy/util-stream@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/util-stream/-/util-stream-1.1.0.tgz#3f174223bef33af85aa39261fccb908648e13af9" + resolved "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-1.1.0.tgz" integrity sha512-w3lsdGsntaLQIrwDWJkIFKrFscgZXwU/oxsse09aSTNv5TckPhDeYea3LhsDrU5MGAG3vprhVZAKr33S45coVA== dependencies: "@smithy/fetch-http-handler" "^1.1.0" @@ -2541,7 +2734,7 @@ "@smithy/util-stream@^2.0.23": version "2.0.23" - resolved "https://registry.yarnpkg.com/@smithy/util-stream/-/util-stream-2.0.23.tgz#468ad29913d091092317cfea2d8ac5b866326a07" + resolved "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-2.0.23.tgz" integrity sha512-OJMWq99LAZJUzUwTk+00plyxX3ESktBaGPhqNIEVab+53gLULiWN9B/8bRABLg0K6R6Xg4t80uRdhk3B/LZqMQ== dependencies: "@smithy/fetch-http-handler" "^2.3.1" @@ -2553,23 +2746,37 @@ "@smithy/util-utf8" "^2.0.2" tslib "^2.5.0" +"@smithy/util-stream@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-2.2.0.tgz" + integrity sha512-17faEXbYWIRst1aU9SvPZyMdWmqIrduZjVOqCPMIsWFNxs5yQQgFrJL6b2SdiCzyW9mJoDjFtgi53xx7EH+BXA== + dependencies: + "@smithy/fetch-http-handler" "^2.5.0" + "@smithy/node-http-handler" "^2.5.0" + "@smithy/types" "^2.12.0" + "@smithy/util-base64" "^2.3.0" + "@smithy/util-buffer-from" "^2.2.0" + "@smithy/util-hex-encoding" "^2.2.0" + "@smithy/util-utf8" "^2.3.0" + tslib "^2.6.2" + "@smithy/util-uri-escape@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-1.1.0.tgz#a8c5edaf19c0efdb9b51661e840549cf600a1808" + resolved "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-1.1.0.tgz" integrity sha512-/jL/V1xdVRt5XppwiaEU8Etp5WHZj609n0xMTuehmCqdoOFbId1M+aEeDWZsQ+8JbEB/BJ6ynY2SlYmOaKtt8w== dependencies: tslib "^2.5.0" -"@smithy/util-uri-escape@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-2.0.0.tgz#19955b1a0f517a87ae77ac729e0e411963dfda95" - integrity sha512-ebkxsqinSdEooQduuk9CbKcI+wheijxEb3utGXkCoYQkJnwTnLbH1JXGimJtUkQwNQbsbuYwG2+aFVyZf5TLaw== +"@smithy/util-uri-escape@^2.0.0", "@smithy/util-uri-escape@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-2.2.0.tgz" + integrity sha512-jtmJMyt1xMD/d8OtbVJ2gFZOSKc+ueYJZPW20ULW1GOp/q/YIM0wNh+u8ZFao9UaIGz4WoPW8hC64qlWLIfoDA== dependencies: - tslib "^2.5.0" + tslib "^2.6.2" "@smithy/util-utf8@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-1.1.0.tgz#b791ab1e3f694374edfe22811e39dd8424a1be69" + resolved "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-1.1.0.tgz" integrity sha512-p/MYV+JmqmPyjdgyN2UxAeYDj9cBqCjp0C/NsTWnnjoZUVqoeZ6IrW915L9CAKWVECgv9lVQGc4u/yz26/bI1A== dependencies: "@smithy/util-buffer-from" "^1.1.0" @@ -2577,15 +2784,23 @@ 
"@smithy/util-utf8@^2.0.2": version "2.0.2" - resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-2.0.2.tgz#626b3e173ad137208e27ed329d6bea70f4a1a7f7" + resolved "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.0.2.tgz" integrity sha512-qOiVORSPm6Ce4/Yu6hbSgNHABLP2VMv8QOC3tTDNHHlWY19pPyc++fBTbZPtx6egPXi4HQxKDnMxVxpbtX2GoA== dependencies: "@smithy/util-buffer-from" "^2.0.0" tslib "^2.5.0" +"@smithy/util-utf8@^2.3.0": + version "2.3.0" + resolved "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz" + integrity sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A== + dependencies: + "@smithy/util-buffer-from" "^2.2.0" + tslib "^2.6.2" + "@smithy/util-waiter@^2.0.15": version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/util-waiter/-/util-waiter-2.0.15.tgz#b02a42bf1b82f07973d1756a0ee10fafa1fbf58e" + resolved "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-2.0.15.tgz" integrity sha512-9Y+btzzB7MhLADW7xgD6SjvmoYaRkrb/9SCbNGmNdfO47v38rxb90IGXyDtAK0Shl9bMthTmLgjlfYc+vtz2Qw== dependencies: "@smithy/abort-controller" "^2.0.15" @@ -2594,38 +2809,38 @@ "@tootallnate/once@2": version "2.0.0" - resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" + resolved "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz" integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A== "@tsconfig/node10@^1.0.7": version "1.0.9" - resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + resolved "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz" integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== "@tsconfig/node12@^1.0.7": version "1.0.11" - resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + resolved "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz" integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== "@tsconfig/node14@^1.0.0": version "1.0.3" - resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + resolved "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz" integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== "@tsconfig/node16@^1.0.2": version "1.0.4" - resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" + resolved "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz" integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== "@types/aws-lambda@^8.10.83": - version "8.10.117" - resolved "https://registry.yarnpkg.com/@types/aws-lambda/-/aws-lambda-8.10.117.tgz#7d12cd6c33ceb4f1b1f35581dacf9f610d99cdd2" - integrity sha512-6T1aHTSSK4l8+67ANKHha/CRVxyk/bAl6OGCOxsKVsHaSxWpqsqgupc8rPw8vQGjtIgIZ+EaHqMz8gA4d6xZhQ== + version "8.10.145" + resolved "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.145.tgz" + integrity sha512-dtByW6WiFk5W5Jfgz1VM+YPA21xMXTuSFoLYIDY0L44jDLLflVPtZkYuu3/YxpGcvjzKFBZLU+GyKjR0HOYtyw== "@types/babel__core@^7.1.14": - version "7.20.1" - resolved 
"https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.1.tgz#916ecea274b0c776fec721e333e55762d3a9614b" - integrity sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw== + version "7.20.5" + resolved "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz" + integrity sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA== dependencies: "@babel/parser" "^7.20.7" "@babel/types" "^7.20.7" @@ -2634,172 +2849,167 @@ "@types/babel__traverse" "*" "@types/babel__generator@*": - version "7.6.4" - resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" - integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + version "7.6.8" + resolved "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz" + integrity sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw== dependencies: "@babel/types" "^7.0.0" "@types/babel__template@*": - version "7.4.1" - resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" - integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + version "7.4.4" + resolved "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz" + integrity sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A== dependencies: "@babel/parser" "^7.1.0" "@babel/types" "^7.0.0" "@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": - version "7.20.1" - resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.20.1.tgz#dd6f1d2411ae677dcb2db008c962598be31d6acf" - integrity sha512-MitHFXnhtgwsGZWtT68URpOvLN4EREih1u3QtQiN4VdAxWKRVvGCSvw/Qth0M0Qq3pJpnGOu5JaM/ydK7OGbqg== + version "7.20.6" + resolved "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz" + integrity sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg== dependencies: "@babel/types" "^7.20.7" "@types/btoa-lite@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@types/btoa-lite/-/btoa-lite-1.0.0.tgz#e190a5a548e0b348adb0df9ac7fa5f1151c7cca4" - integrity sha512-wJsiX1tosQ+J5+bY5LrSahHxr2wT+uME5UDwdN1kg4frt40euqA+wzECkmq4t5QbveHiJepfdThgQrPw6KiSlg== + version "1.0.2" + resolved "https://registry.npmjs.org/@types/btoa-lite/-/btoa-lite-1.0.2.tgz" + integrity sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg== + +"@types/caseless@*": + version "0.12.5" + resolved "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz" + integrity sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg== "@types/cookie@^0.4.1": version "0.4.1" - resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.4.1.tgz#bfd02c1f2224567676c1545199f87c3a861d878d" + resolved "https://registry.npmjs.org/@types/cookie/-/cookie-0.4.1.tgz" integrity sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q== "@types/graceful-fs@^4.1.3": - version "4.1.6" - resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.6.tgz#e14b2576a1c25026b7f02ede1de3b84c3a1efeae" - integrity 
sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw== + version "4.1.9" + resolved "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz" + integrity sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ== dependencies: "@types/node" "*" "@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": - version "2.0.4" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" - integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + version "2.0.6" + resolved "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz" + integrity sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w== "@types/istanbul-lib-report@*": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" - integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + version "3.0.3" + resolved "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz" + integrity sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA== dependencies: "@types/istanbul-lib-coverage" "*" "@types/istanbul-reports@^3.0.0": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" - integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + version "3.0.4" + resolved "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz" + integrity sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ== dependencies: "@types/istanbul-lib-report" "*" -"@types/jest@^29.5.11": - version "29.5.11" - resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.11.tgz#0c13aa0da7d0929f078ab080ae5d4ced80fa2f2c" - integrity sha512-S2mHmYIVe13vrm6q4kN6fLYYAka15ALQki/vgDC3mIukEOx8WJlv0kQPM+d4w8Gp6u0uSdKND04IlTXBv0rwnQ== +"@types/jest@^29.5.14": + version "29.5.14" + resolved "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz" + integrity sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ== dependencies: expect "^29.0.0" pretty-format "^29.0.0" "@types/js-levenshtein@^1.1.1": version "1.1.1" - resolved "https://registry.yarnpkg.com/@types/js-levenshtein/-/js-levenshtein-1.1.1.tgz#ba05426a43f9e4e30b631941e0aa17bf0c890ed5" + resolved "https://registry.npmjs.org/@types/js-levenshtein/-/js-levenshtein-1.1.1.tgz" integrity sha512-qC4bCqYGy1y/NP7dDVr7KJarn+PbX1nSpwA7JXdu0HxT3QYjO8MJ+cntENtHFVy2dRAyBV23OZ6MxsW1AM1L8g== -"@types/json-schema@^7.0.12": +"@types/json-schema@^7.0.12", "@types/json-schema@^7.0.9": version "7.0.15" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" + resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz" integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== -"@types/json-schema@^7.0.9": - version "7.0.12" - resolved 
"https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.12.tgz#d70faba7039d5fca54c83c7dbab41051d2b6f6cb" - integrity sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA== - "@types/json5@^0.0.29": version "0.0.29" - resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + resolved "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz" integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== "@types/jsonwebtoken@^9.0.0": - version "9.0.2" - resolved "https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz#9eeb56c76dd555039be2a3972218de5bd3b8d83e" - integrity sha512-drE6uz7QBKq1fYqqoFKTDRdFCPHd5TCub75BM+D+cMx7NU9hUz7SESLfC2fSCXVFMO5Yj8sOWHuGqPgjc+fz0Q== - dependencies: - "@types/node" "*" - -"@types/node-fetch@^2.5.0": - version "2.6.4" - resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.4.tgz#1bc3a26de814f6bf466b25aeb1473fa1afe6a660" - integrity sha512-1ZX9fcN4Rvkvgv4E6PAY5WXUFWFcRWxZa3EW83UjycOB9ljJCedb2CupIP4RZMEwF/M3eTcCihbBRgwtGbg5Rg== + version "9.0.7" + resolved "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.7.tgz" + integrity sha512-ugo316mmTYBl2g81zDFnZ7cfxlut3o+/EQdaP7J8QN2kY6lJ22hmQYCK5EHcJHbrW+dkCGSCPgbG8JtYj6qSrg== dependencies: "@types/node" "*" - form-data "^3.0.0" -"@types/node@*": - version "20.3.1" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.3.1.tgz#e8a83f1aa8b649377bb1fb5d7bac5cb90e784dfe" - integrity sha512-EhcH/wvidPy1WeML3TtYFGR83UzjxeWRen9V402T8aUGYsCHOmfoisV3ZSg03gAFIbLq8TnWOJ0f4cALtnSEUg== +"@types/long@^4.0.0": + version "4.0.2" + resolved "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz" + integrity sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA== -"@types/node@^18.19.3": +"@types/node@*", "@types/node@>=13.7.0", "@types/node@^18.19.3": version "18.19.3" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.3.tgz#e4723c4cb385641d61b983f6fe0b716abd5f8fc0" + resolved "https://registry.npmjs.org/@types/node/-/node-18.19.3.tgz" integrity sha512-k5fggr14DwAytoA/t8rPrIz++lXK7/DqckthCmoZOKNsEbJkId4Z//BqgApXBUGrGddrigYa1oqheo/7YmW4rg== dependencies: undici-types "~5.26.4" -"@types/semver@^7.3.12": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.0.tgz#591c1ce3a702c45ee15f47a42ade72c2fd78978a" - integrity sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw== +"@types/request@^2.48.8": + version "2.48.12" + resolved "https://registry.npmjs.org/@types/request/-/request-2.48.12.tgz" + integrity sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw== + dependencies: + "@types/caseless" "*" + "@types/node" "*" + "@types/tough-cookie" "*" + form-data "^2.5.0" -"@types/semver@^7.5.0": +"@types/semver@^7.3.12", "@types/semver@^7.5.0": version "7.5.6" - resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.6.tgz#c65b2bfce1bec346582c07724e3f8c1017a20339" + resolved "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz" integrity sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A== "@types/stack-utils@^2.0.0": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" - integrity 
sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + version "2.0.3" + resolved "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz" + integrity sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw== "@types/statuses@^2.0.1": version "2.0.4" - resolved "https://registry.yarnpkg.com/@types/statuses/-/statuses-2.0.4.tgz#041143ba4a918e8f080f8b0ffbe3d4cb514e2315" + resolved "https://registry.npmjs.org/@types/statuses/-/statuses-2.0.4.tgz" integrity sha512-eqNDvZsCNY49OAXB0Firg/Sc2BgoWsntsLUdybGFOhAfCD6QJ2n9HXUIHGqt5qjrxmMv4wS8WLAw43ZkKcJ8Pw== -"@types/tunnel@^0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@types/tunnel/-/tunnel-0.0.3.tgz#f109e730b072b3136347561fc558c9358bb8c6e9" - integrity sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA== - dependencies: - "@types/node" "*" +"@types/tough-cookie@*": + version "4.0.5" + resolved "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz" + integrity sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA== "@types/uuid@^9.0.7": version "9.0.7" - resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-9.0.7.tgz#b14cebc75455eeeb160d5fe23c2fcc0c64f724d8" + resolved "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.7.tgz" integrity sha512-WUtIVRUZ9i5dYXefDEAI7sh9/O7jGvHg7Df/5O/gtH3Yabe5odI3UWopVR1qbPXQtvOxWu3mM4XxlYeZtMWF4g== "@types/yargs-parser@*": - version "21.0.0" - resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" - integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + version "21.0.3" + resolved "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz" + integrity sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ== "@types/yargs@^17.0.8": - version "17.0.24" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" - integrity sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw== + version "17.0.33" + resolved "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz" + integrity sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA== dependencies: "@types/yargs-parser" "*" -"@typescript-eslint/eslint-plugin@6.14.0": - version "6.14.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.14.0.tgz#fc1ab5f23618ba590c87e8226ff07a760be3dd7b" - integrity sha512-1ZJBykBCXaSHG94vMMKmiHoL0MhNHKSVlcHVYZNw+BKxufhqQVTOawNpwwI1P5nIFZ/4jLVop0mcY6mJJDFNaw== +"@typescript-eslint/eslint-plugin@6.15.0": + version "6.15.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.15.0.tgz#b0b3e15fa8c3e67ed4386b765cc0ba98ad3a303b" + integrity sha512-j5qoikQqPccq9QoBAupOP+CBu8BaJ8BLjaXSioDISeTZkVO3ig7oSIKh3H+rEpee7xCXtWwSB4KIL5l6hWZzpg== dependencies: "@eslint-community/regexpp" "^4.5.1" - "@typescript-eslint/scope-manager" "6.14.0" - "@typescript-eslint/type-utils" "6.14.0" - "@typescript-eslint/utils" "6.14.0" - "@typescript-eslint/visitor-keys" "6.14.0" + "@typescript-eslint/scope-manager" "6.15.0" + "@typescript-eslint/type-utils" "6.15.0" + "@typescript-eslint/utils" "6.15.0" + "@typescript-eslint/visitor-keys" "6.15.0" debug 
"^4.3.4" graphemer "^1.4.0" ignore "^5.2.4" @@ -2807,56 +3017,56 @@ semver "^7.5.4" ts-api-utils "^1.0.1" -"@typescript-eslint/parser@6.14.0": - version "6.14.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-6.14.0.tgz#a2d6a732e0d2b95c73f6a26ae7362877cc1b4212" - integrity sha512-QjToC14CKacd4Pa7JK4GeB/vHmWFJckec49FR4hmIRf97+KXole0T97xxu9IFiPxVQ1DBWrQ5wreLwAGwWAVQA== +"@typescript-eslint/parser@6.15.0": + version "6.15.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-6.15.0.tgz#1af69741cfa314a13c1434d0bdd5a0c3096699d7" + integrity sha512-MkgKNnsjC6QwcMdlNAel24jjkEO/0hQaMDLqP4S9zq5HBAUJNQB6y+3DwLjX7b3l2b37eNAxMPLwb3/kh8VKdA== dependencies: - "@typescript-eslint/scope-manager" "6.14.0" - "@typescript-eslint/types" "6.14.0" - "@typescript-eslint/typescript-estree" "6.14.0" - "@typescript-eslint/visitor-keys" "6.14.0" + "@typescript-eslint/scope-manager" "6.15.0" + "@typescript-eslint/types" "6.15.0" + "@typescript-eslint/typescript-estree" "6.15.0" + "@typescript-eslint/visitor-keys" "6.15.0" debug "^4.3.4" "@typescript-eslint/scope-manager@5.59.11": version "5.59.11" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.59.11.tgz#5d131a67a19189c42598af9fb2ea1165252001ce" + resolved "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.11.tgz" integrity sha512-dHFOsxoLFtrIcSj5h0QoBT/89hxQONwmn3FOQ0GOQcLOOXm+MIrS8zEAhs4tWl5MraxCY3ZJpaXQQdFMc2Tu+Q== dependencies: "@typescript-eslint/types" "5.59.11" "@typescript-eslint/visitor-keys" "5.59.11" -"@typescript-eslint/scope-manager@6.14.0": - version "6.14.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.14.0.tgz#53d24363fdb5ee0d1d8cda4ed5e5321272ab3d48" - integrity sha512-VT7CFWHbZipPncAZtuALr9y3EuzY1b1t1AEkIq2bTXUPKw+pHoXflGNG5L+Gv6nKul1cz1VH8fz16IThIU0tdg== +"@typescript-eslint/scope-manager@6.15.0": + version "6.15.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.15.0.tgz#40e5214a3e9e048aca55ce33381bc61b6b51c32a" + integrity sha512-+BdvxYBltqrmgCNu4Li+fGDIkW9n//NrruzG9X1vBzaNK+ExVXPoGB71kneaVw/Jp+4rH/vaMAGC6JfMbHstVg== dependencies: - "@typescript-eslint/types" "6.14.0" - "@typescript-eslint/visitor-keys" "6.14.0" + "@typescript-eslint/types" "6.15.0" + "@typescript-eslint/visitor-keys" "6.15.0" -"@typescript-eslint/type-utils@6.14.0": - version "6.14.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-6.14.0.tgz#ac9cb5ba0615c837f1a6b172feeb273d36e4f8af" - integrity sha512-x6OC9Q7HfYKqjnuNu5a7kffIYs3No30isapRBJl1iCHLitD8O0lFbRcVGiOcuyN837fqXzPZ1NS10maQzZMKqw== +"@typescript-eslint/type-utils@6.15.0": + version "6.15.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-6.15.0.tgz#c22261bd00566821a300d08f4632533a8f9bed01" + integrity sha512-CnmHKTfX6450Bo49hPg2OkIm/D/TVYV7jO1MCfPYGwf6x3GO0VU8YMO5AYMn+u3X05lRRxA4fWCz87GFQV6yVQ== dependencies: - "@typescript-eslint/typescript-estree" "6.14.0" - "@typescript-eslint/utils" "6.14.0" + "@typescript-eslint/typescript-estree" "6.15.0" + "@typescript-eslint/utils" "6.15.0" debug "^4.3.4" ts-api-utils "^1.0.1" "@typescript-eslint/types@5.59.11": version "5.59.11" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.59.11.tgz#1a9018fe3c565ba6969561f2a49f330cf1fe8db1" + resolved "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.11.tgz" integrity 
sha512-epoN6R6tkvBYSc+cllrz+c2sOFWkbisJZWkOE+y3xHtvYaOE6Wk6B8e114McRJwFRjGvYdJwLXQH5c9osME/AA== -"@typescript-eslint/types@6.14.0": - version "6.14.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.14.0.tgz#935307f7a931016b7a5eb25d494ea3e1f613e929" - integrity sha512-uty9H2K4Xs8E47z3SnXEPRNDfsis8JO27amp2GNCnzGETEW3yTqEIVg5+AI7U276oGF/tw6ZA+UesxeQ104ceA== +"@typescript-eslint/types@6.15.0": + version "6.15.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.15.0.tgz#a9f7b006aee52b0948be6e03f521814bf435ddd5" + integrity sha512-yXjbt//E4T/ee8Ia1b5mGlbNj9fB9lJP4jqLbZualwpP2BCQ5is6BcWwxpIsY4XKAhmdv3hrW92GdtJbatC6dQ== "@typescript-eslint/typescript-estree@5.59.11": version "5.59.11" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.11.tgz#b2caaa31725e17c33970c1197bcd54e3c5f42b9f" + resolved "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.11.tgz" integrity sha512-YupOpot5hJO0maupJXixi6l5ETdrITxeo5eBOeuV7RSKgYdU3G5cxO49/9WRnJq9EMrB7AuTSLH/bqOsXi7wPA== dependencies: "@typescript-eslint/types" "5.59.11" @@ -2867,35 +3077,35 @@ semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/typescript-estree@6.14.0": - version "6.14.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.14.0.tgz#90c7ddd45cd22139adf3d4577580d04c9189ac13" - integrity sha512-yPkaLwK0yH2mZKFE/bXkPAkkFgOv15GJAUzgUVonAbv0Hr4PK/N2yaA/4XQbTZQdygiDkpt5DkxPELqHguNvyw== +"@typescript-eslint/typescript-estree@6.15.0": + version "6.15.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.15.0.tgz#2f8a513df1ce5e6e1ba8e5c6aa52f392ae023fc5" + integrity sha512-7mVZJN7Hd15OmGuWrp2T9UvqR2Ecg+1j/Bp1jXUEY2GZKV6FXlOIoqVDmLpBiEiq3katvj/2n2mR0SDwtloCew== dependencies: - "@typescript-eslint/types" "6.14.0" - "@typescript-eslint/visitor-keys" "6.14.0" + "@typescript-eslint/types" "6.15.0" + "@typescript-eslint/visitor-keys" "6.15.0" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" semver "^7.5.4" ts-api-utils "^1.0.1" -"@typescript-eslint/utils@6.14.0": - version "6.14.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-6.14.0.tgz#856a9e274367d99ffbd39c48128b93a86c4261e3" - integrity sha512-XwRTnbvRr7Ey9a1NT6jqdKX8y/atWG+8fAIu3z73HSP8h06i3r/ClMhmaF/RGWGW1tHJEwij1uEg2GbEmPYvYg== +"@typescript-eslint/utils@6.15.0": + version "6.15.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-6.15.0.tgz#f80dbb79f3b0f569077a8711dd44186a8933fa4c" + integrity sha512-eF82p0Wrrlt8fQSRL0bGXzK5nWPRV2dYQZdajcfzOD9+cQz9O7ugifrJxclB+xVOvWvagXfqS4Es7vpLP4augw== dependencies: "@eslint-community/eslint-utils" "^4.4.0" "@types/json-schema" "^7.0.12" "@types/semver" "^7.5.0" - "@typescript-eslint/scope-manager" "6.14.0" - "@typescript-eslint/types" "6.14.0" - "@typescript-eslint/typescript-estree" "6.14.0" + "@typescript-eslint/scope-manager" "6.15.0" + "@typescript-eslint/types" "6.15.0" + "@typescript-eslint/typescript-estree" "6.15.0" semver "^7.5.4" "@typescript-eslint/utils@^5.10.0": version "5.59.11" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.59.11.tgz#9dbff49dc80bfdd9289f9f33548f2e8db3c59ba1" + resolved "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.11.tgz" integrity sha512-didu2rHSOMUdJThLk4aZ1Or8IcO3HzCw/ZvEjTTIfjIrcdd5cvSIwwDy2AOlE7htSNp7QIZ10fLMyRCveesMLg== dependencies: "@eslint-community/eslint-utils" "^4.2.0" @@ -2909,62 
+3119,69 @@ "@typescript-eslint/visitor-keys@5.59.11": version "5.59.11" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.11.tgz#dca561ddad169dc27d62396d64f45b2d2c3ecc56" + resolved "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.11.tgz" integrity sha512-KGYniTGG3AMTuKF9QBD7EIrvufkB6O6uX3knP73xbKLMpH+QRPcgnCxjWXSHjMRuOxFLovljqQgQpR0c7GvjoA== dependencies: "@typescript-eslint/types" "5.59.11" eslint-visitor-keys "^3.3.0" -"@typescript-eslint/visitor-keys@6.14.0": - version "6.14.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.14.0.tgz#1d1d486581819287de824a56c22f32543561138e" - integrity sha512-fB5cw6GRhJUz03MrROVuj5Zm/Q+XWlVdIsFj+Zb1Hvqouc8t+XP2H5y53QYU/MGtd2dPg6/vJJlhoX3xc2ehfw== +"@typescript-eslint/visitor-keys@6.15.0": + version "6.15.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.15.0.tgz#5baf97a7bfeec6f4894d400437055155a46b2330" + integrity sha512-1zvtdC1a9h5Tb5jU9x3ADNXO9yjP8rXlaoChu0DQX40vf5ACVpYIVIZhIMZ6d5sDXH7vq4dsZBT1fEGj8D2n2w== dependencies: - "@typescript-eslint/types" "6.14.0" + "@typescript-eslint/types" "6.15.0" eslint-visitor-keys "^3.4.1" "@ungap/structured-clone@^1.2.0": version "1.2.0" - resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" + resolved "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz" integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== "@vercel/ncc@0.38.1": version "0.38.1" - resolved "https://registry.yarnpkg.com/@vercel/ncc/-/ncc-0.38.1.tgz#13f08738111e1d9e8a22fd6141f3590e54d9a60e" + resolved "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.1.tgz" integrity sha512-IBBb+iI2NLu4VQn3Vwldyi2QwaXt5+hTyh58ggAMoCGE6DJmPvwL3KPBWcJl1m9LYPChBLE980Jw+CS4Wokqxw== abort-controller@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" + resolved "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz" integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== dependencies: event-target-shim "^5.0.0" acorn-jsx@^5.3.2: version "5.3.2" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + resolved "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== acorn-walk@^8.1.1: version "8.3.1" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.1.tgz#2f10f5b69329d90ae18c58bf1fa8fccd8b959a43" + resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.1.tgz" integrity sha512-TgUZgYvqZprrl7YldZNoa9OciCAyZR+Ejm9eXzKCmjsF5IKp/wgQ7Z/ZpjpGTIUPwrHQIcYeI8qDh4PsEwxMbw== acorn@^8.4.1, acorn@^8.9.0: - version "8.11.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.2.tgz#ca0d78b51895be5390a5903c5b3bdcdaf78ae40b" - integrity sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w== + version "8.14.0" + resolved "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz" + integrity sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA== agent-base@6: version "6.0.2" - resolved 
"https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + resolved "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz" integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== dependencies: debug "4" +agent-base@^7.0.2: + version "7.1.1" + resolved "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz" + integrity sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA== + dependencies: + debug "^4.3.4" + aggregate-error@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + resolved "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz" integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== dependencies: clean-stack "^2.0.0" @@ -2972,7 +3189,7 @@ aggregate-error@^3.1.0: ajv@^6.12.4: version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== dependencies: fast-deep-equal "^3.1.1" @@ -2982,38 +3199,31 @@ ajv@^6.12.4: ansi-escapes@^4.2.1: version "4.3.2" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + resolved "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz" integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== dependencies: type-fest "^0.21.3" ansi-regex@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== dependencies: color-convert "^2.0.1" ansi-styles@^5.0.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz" integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== anymatch@^3.0.3, anymatch@~3.1.2: version "3.1.3" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz" integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== dependencies: normalize-path "^3.0.0" @@ -3021,32 +3231,32 @@ anymatch@^3.0.3, 
anymatch@~3.1.2: arg@^4.1.0: version "4.1.3" - resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + resolved "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz" integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== argparse@^1.0.7: version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" argparse@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== -array-buffer-byte-length@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead" - integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A== +array-buffer-byte-length@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz" + integrity sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg== dependencies: - call-bind "^1.0.2" - is-array-buffer "^3.0.1" + call-bind "^1.0.5" + is-array-buffer "^3.0.4" array-includes@^3.1.7: version "3.1.7" - resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.7.tgz#8cd2e01b26f7a3086cbc87271593fe921c62abda" + resolved "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz" integrity sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ== dependencies: call-bind "^1.0.2" @@ -3057,12 +3267,12 @@ array-includes@^3.1.7: array-union@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== array.prototype.findlastindex@^1.2.3: version "1.2.3" - resolved "https://registry.yarnpkg.com/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz#b37598438f97b579166940814e2c0493a4f50207" + resolved "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz" integrity sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA== dependencies: call-bind "^1.0.2" @@ -3073,7 +3283,7 @@ array.prototype.findlastindex@^1.2.3: array.prototype.flat@^1.3.2: version "1.3.2" - resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz#1476217df8cff17d72ee8f3ba06738db5b387d18" + resolved "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz" integrity sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA== dependencies: call-bind "^1.0.2" @@ -3083,7 +3293,7 @@ array.prototype.flat@^1.3.2: array.prototype.flatmap@^1.3.2: version "1.3.2" - resolved 
"https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz#c9a7c6831db8e719d6ce639190146c24bbd3e527" + resolved "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz" integrity sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ== dependencies: call-bind "^1.0.2" @@ -3091,32 +3301,47 @@ array.prototype.flatmap@^1.3.2: es-abstract "^1.22.1" es-shim-unscopables "^1.0.0" -arraybuffer.prototype.slice@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz#98bd561953e3e74bb34938e77647179dfe6e9f12" - integrity sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw== +arraybuffer.prototype.slice@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz" + integrity sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A== dependencies: - array-buffer-byte-length "^1.0.0" - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - get-intrinsic "^1.2.1" - is-array-buffer "^3.0.2" + array-buffer-byte-length "^1.0.1" + call-bind "^1.0.5" + define-properties "^1.2.1" + es-abstract "^1.22.3" + es-errors "^1.2.1" + get-intrinsic "^1.2.3" + is-array-buffer "^3.0.4" is-shared-array-buffer "^1.0.2" +arrify@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz" + integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== + +async-retry@^1.3.3: + version "1.3.3" + resolved "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz" + integrity sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw== + dependencies: + retry "0.13.1" + asynckit@^0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== -available-typed-arrays@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" - integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== +available-typed-arrays@^1.0.7: + version "1.0.7" + resolved "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz" + integrity sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ== + dependencies: + possible-typed-array-names "^1.0.0" axios@^1.6.2: version "1.6.2" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.2.tgz#de67d42c755b571d3e698df1b6504cde9b0ee9f2" + resolved "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz" integrity sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A== dependencies: follow-redirects "^1.15.0" @@ -3125,7 +3350,7 @@ axios@^1.6.2: babel-jest@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-29.7.0.tgz#f4369919225b684c56085998ac63dbd05be020d5" + resolved "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz" integrity 
sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg== dependencies: "@jest/transform" "^29.7.0" @@ -3138,7 +3363,7 @@ babel-jest@^29.7.0: babel-plugin-istanbul@^6.1.1: version "6.1.1" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + resolved "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz" integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" @@ -3149,7 +3374,7 @@ babel-plugin-istanbul@^6.1.1: babel-plugin-jest-hoist@^29.6.3: version "29.6.3" - resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz#aadbe943464182a8922c3c927c3067ff40d24626" + resolved "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz" integrity sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg== dependencies: "@babel/template" "^7.3.3" @@ -3158,26 +3383,29 @@ babel-plugin-jest-hoist@^29.6.3: "@types/babel__traverse" "^7.0.6" babel-preset-current-node-syntax@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" - integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + version "1.1.0" + resolved "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz" + integrity sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw== dependencies: "@babel/plugin-syntax-async-generators" "^7.8.4" "@babel/plugin-syntax-bigint" "^7.8.3" - "@babel/plugin-syntax-class-properties" "^7.8.3" - "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-import-attributes" "^7.24.7" + "@babel/plugin-syntax-import-meta" "^7.10.4" "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" "@babel/plugin-syntax-object-rest-spread" "^7.8.3" "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-top-level-await" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" babel-preset-jest@^29.6.3: version "29.6.3" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz#fa05fa510e7d493896d7b0dd2033601c840f171c" + resolved "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz" integrity sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA== dependencies: babel-plugin-jest-hoist "^29.6.3" @@ -3185,27 +3413,32 @@ babel-preset-jest@^29.6.3: balanced-match@^1.0.0: version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" 
integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -base64-js@^1.3.1: +base64-js@^1.3.0, base64-js@^1.3.1: version "1.5.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== before-after-hook@^2.2.0: version "2.2.3" - resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" + resolved "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz" integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== +bignumber.js@^9.0.0: + version "9.1.2" + resolved "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz" + integrity sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug== + binary-extensions@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + version "2.3.0" + resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz" + integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw== bl@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + resolved "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz" integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== dependencies: buffer "^5.5.0" @@ -3214,125 +3447,110 @@ bl@^4.1.0: bottleneck@^2.15.3: version "2.19.5" - resolved "https://registry.yarnpkg.com/bottleneck/-/bottleneck-2.19.5.tgz#5df0b90f59fd47656ebe63c78a98419205cadd91" + resolved "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz" integrity sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw== bowser@^2.11.0: version "2.11.0" - resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" + resolved "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz" integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== brace-expansion@^1.1.7: version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" -braces@^3.0.2, braces@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== +braces@^3.0.3, braces@~3.0.2: + version "3.0.3" + resolved "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz" + integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== dependencies: - fill-range "^7.0.1" + fill-range "^7.1.1" 
-browserslist@^4.21.3: - version "4.21.8" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.8.tgz#db2498e1f4b80ed199c076248a094935860b6017" - integrity sha512-j+7xYe+v+q2Id9qbBeCI8WX5NmZSRe8es1+0xntD/+gaWXznP8tFEkv5IgSaHf5dS1YwVMbX/4W6m937mj+wQw== +browserslist@^4.24.0: + version "4.24.2" + resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.24.2.tgz" + integrity sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg== dependencies: - caniuse-lite "^1.0.30001502" - electron-to-chromium "^1.4.428" - node-releases "^2.0.12" - update-browserslist-db "^1.0.11" + caniuse-lite "^1.0.30001669" + electron-to-chromium "^1.5.41" + node-releases "^2.0.18" + update-browserslist-db "^1.1.1" bs-logger@0.x: version "0.2.6" - resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" + resolved "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz" integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== dependencies: fast-json-stable-stringify "2.x" bser@2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + resolved "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz" integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== dependencies: node-int64 "^0.4.0" btoa-lite@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/btoa-lite/-/btoa-lite-1.0.0.tgz#337766da15801210fdd956c22e9c6891ab9d0337" + resolved "https://registry.npmjs.org/btoa-lite/-/btoa-lite-1.0.0.tgz" integrity sha512-gvW7InbIyF8AicrqWoptdW08pUxuhq8BEgowNajy9RhiE86fmGAGl+bLKo6oB8QP0CkqHLowfN0oJdKC/J6LbA== buffer-equal-constant-time@1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" + resolved "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA== buffer-from@^1.0.0: version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== buffer@^5.5.0: version "5.7.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + resolved "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== dependencies: base64-js "^1.3.1" ieee754 "^1.1.13" -call-bind@^1.0.0, call-bind@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" - integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== - dependencies: - function-bind "^1.1.1" - get-intrinsic "^1.0.2" - -call-bind@^1.0.4, call-bind@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513" - integrity sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ== +call-bind@^1.0.2, 
call-bind@^1.0.5, call-bind@^1.0.6, call-bind@^1.0.7: + version "1.0.7" + resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" function-bind "^1.1.2" - get-intrinsic "^1.2.1" - set-function-length "^1.1.1" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" callsites@^3.0.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== camelcase@^5.3.1: version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== camelcase@^6.2.0: version "6.3.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + resolved "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== -caniuse-lite@^1.0.30001502: - version "1.0.30001502" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001502.tgz#f7e4a76eb1d2d585340f773767be1fefc118dca8" - integrity sha512-AZ+9tFXw1sS0o0jcpJQIXvFTOB/xGiQ4OQ2t98QX3NDn2EZTSRBC801gxrsGgViuq2ak/NLkNgSNEPtCr5lfKg== - -chalk@^2.0.0: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" +caniuse-lite@^1.0.30001669: + version "1.0.30001680" + resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001680.tgz" + integrity sha512-rPQy70G6AGUMnbwS1z6Xg+RkHYPAi18ihs47GH0jcxIG7wArmPgY3XbS2sRdBbxJljp3thdT8BIqv9ccCypiPA== chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== dependencies: ansi-styles "^4.1.0" @@ -3340,17 +3558,17 @@ chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: char-regex@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + resolved "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz" integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== chardet@^0.7.0: version "0.7.0" - resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" + resolved "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz" integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== chokidar@^3.4.2: version "3.5.3" - resolved 
"https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== dependencies: anymatch "~3.1.2" @@ -3364,40 +3582,40 @@ chokidar@^3.4.2: fsevents "~2.3.2" ci-info@^3.2.0: - version "3.8.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" - integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== + version "3.9.0" + resolved "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz" + integrity sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ== cjs-module-lexer@^1.0.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" - integrity sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ== + version "1.4.1" + resolved "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.1.tgz" + integrity sha512-cuSVIHi9/9E/+821Qjdvngor+xpnlwnuwIyZOaLmHBVdXL+gP+I6QQB9VkO7RI77YIcTV+S1W9AreJ5eN63JBA== clean-stack@^2.0.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz" integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== cli-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + resolved "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz" integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== dependencies: restore-cursor "^3.1.0" cli-spinners@^2.5.0: - version "2.9.0" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.0.tgz#5881d0ad96381e117bbe07ad91f2008fe6ffd8db" - integrity sha512-4/aL9X3Wh0yiMQlE+eeRhWP6vclO3QRtw1JHKIT0FFUs5FjpFmESqtMvYZ0+lbzBw900b95mS0hohy+qn2VK/g== + version "2.9.2" + resolved "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz" + integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== cli-width@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" + resolved "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz" integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== cliui@^8.0.1: version "8.0.1" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + resolved "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz" integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== dependencies: string-width "^4.2.0" @@ -3406,78 +3624,61 @@ cliui@^8.0.1: clone@^1.0.2: version "1.0.4" - resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + resolved "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz" integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== co@^4.6.0: version "4.6.0" - resolved 
"https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + resolved "https://registry.npmjs.org/co/-/co-4.6.0.tgz" integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== collect-v8-coverage@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" - integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== - -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" + version "1.0.2" + resolved "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz" + integrity sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q== color-convert@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== dependencies: color-name "~1.1.4" -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - color-name@~1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== colors@1.2.3: version "1.2.3" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.2.3.tgz#1b152a9c4f6c9f74bc4bb96233ad0b7983b79744" + resolved "https://registry.npmjs.org/colors/-/colors-1.2.3.tgz" integrity sha512-qTfM2pNFeMZcLvf/RbrVAzDEVttZjFhaApfx9dplNjvHSX88Ui66zBRb/4YGob/xUWxDceirgoC1lT676asfCQ== combined-stream@^1.0.6, combined-stream@^1.0.8: version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" concat-map@0.0.1: version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== -convert-source-map@^1.6.0, convert-source-map@^1.7.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" - integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== - convert-source-map@^2.0.0: version "2.0.0" - resolved 
"https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz" integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== cookie@^0.5.0: version "0.5.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + resolved "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz" integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== create-jest@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/create-jest/-/create-jest-29.7.0.tgz#a355c5b3cb1e1af02ba177fe7afd7feee49a5320" + resolved "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz" integrity sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q== dependencies: "@jest/types" "^29.6.3" @@ -3490,264 +3691,330 @@ create-jest@^29.7.0: create-require@^1.1.0: version "1.1.1" - resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + resolved "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz" integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== dependencies: path-key "^3.1.0" shebang-command "^2.0.0" which "^2.0.1" +data-view-buffer@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz" + integrity sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA== + dependencies: + call-bind "^1.0.6" + es-errors "^1.3.0" + is-data-view "^1.0.1" + +data-view-byte-length@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz" + integrity sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ== + dependencies: + call-bind "^1.0.7" + es-errors "^1.3.0" + is-data-view "^1.0.1" + +data-view-byte-offset@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz" + integrity sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA== + dependencies: + call-bind "^1.0.6" + es-errors "^1.3.0" + is-data-view "^1.0.1" + dateformat@3.0.3: version "3.0.3" - resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-3.0.3.tgz#a6e37499a4d9a9cf85ef5872044d62901c9889ae" + resolved "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz" integrity sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q== -debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" integrity 
sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" debug@^3.2.7: version "3.2.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== dependencies: ms "^2.1.1" dedent@^1.0.0: - version "1.5.1" - resolved "https://registry.yarnpkg.com/dedent/-/dedent-1.5.1.tgz#4f3fc94c8b711e9bb2800d185cd6ad20f2a90aff" - integrity sha512-+LxW+KLWxu3HW3M2w2ympwtqPrqYRzU8fqi6Fhd18fBALe15blJPI/I4+UHveMVG6lJqB4JNd4UG0S5cnVHwIg== + version "1.5.3" + resolved "https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz" + integrity sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ== deep-is@^0.1.3: version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== deepmerge@^4.2.2: version "4.3.1" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + resolved "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz" integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== defaults@^1.0.3: version "1.0.4" - resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + resolved "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz" integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== dependencies: clone "^1.0.2" -define-data-property@^1.0.1, define-data-property@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3" - integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== +define-data-property@^1.0.1, define-data-property@^1.1.4: + version "1.1.4" + resolved "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== dependencies: - get-intrinsic "^1.2.1" + es-define-property "^1.0.0" + es-errors "^1.3.0" gopd "^1.0.1" - has-property-descriptors "^1.0.0" define-lazy-prop@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + resolved "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz" integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== -define-properties@^1.1.3, define-properties@^1.1.4, define-properties@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" - integrity sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA== +define-properties@^1.2.0, define-properties@^1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz" + integrity 
sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== dependencies: + define-data-property "^1.0.1" has-property-descriptors "^1.0.0" object-keys "^1.1.1" delayed-stream@~1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== deprecation@^2.0.0, deprecation@^2.3.1: version "2.3.1" - resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919" + resolved "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz" integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ== detect-newline@^3.0.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + resolved "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz" integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== -diff-sequences@^29.4.3: - version "29.4.3" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.4.3.tgz#9314bc1fabe09267ffeca9cbafc457d8499a13f2" - integrity sha512-ofrBgwpPhCD85kMKtE9RYFFq6OC1A89oW2vvgWZNCwxrUpRUILopY7lsYyMDSjc8g6U6aiO0Qubg6r4Wgt5ZnA== - diff-sequences@^29.6.3: version "29.6.3" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.6.3.tgz#4deaf894d11407c51efc8418012f9e70b84ea921" + resolved "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz" integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q== diff@^4.0.1: version "4.0.2" - resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + resolved "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== dir-glob@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== dependencies: path-type "^4.0.0" doctrine@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + resolved "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz" integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== dependencies: esutils "^2.0.2" doctrine@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + resolved "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz" integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== dependencies: esutils "^2.0.2" dotenv@^16.3.1: version "16.3.1" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" + resolved "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz" integrity 
sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== -ecdsa-sig-formatter@1.0.11: +duplexify@^4.0.0, duplexify@^4.1.3: + version "4.1.3" + resolved "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz" + integrity sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA== + dependencies: + end-of-stream "^1.4.1" + inherits "^2.0.3" + readable-stream "^3.1.1" + stream-shift "^1.0.2" + +ecdsa-sig-formatter@1.0.11, ecdsa-sig-formatter@^1.0.11: version "1.0.11" - resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" + resolved "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz" integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== dependencies: safe-buffer "^5.0.1" -electron-to-chromium@^1.4.428: - version "1.4.428" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.428.tgz#c31fc88e854f49d8305cdabf6ec934ff1588a902" - integrity sha512-L7uUknyY286of0AYC8CKfgWstD0Smk2DvHDi9F0GWQhSH90Bzi7iDrmCbZKz75tYJxeGSAc7TYeKpmbjMDoh1w== +electron-to-chromium@^1.5.41: + version "1.5.63" + resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.63.tgz" + integrity sha512-ddeXKuY9BHo/mw145axlyWjlJ1UBt4WK3AlvkT7W2AbqfRQoacVoRUCF6wL3uIx/8wT9oLKXzI+rFqHHscByaA== emittery@^0.13.1: version "0.13.1" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.13.1.tgz#c04b8c3457490e0847ae51fced3af52d338e3dad" + resolved "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz" integrity sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ== emoji-regex@^8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== +end-of-stream@^1.4.1: + version "1.4.4" + resolved "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + error-ex@^1.3.1: version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + resolved "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" -es-abstract@^1.22.1: - version "1.22.3" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.22.3.tgz#48e79f5573198de6dee3589195727f4f74bc4f32" - integrity sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA== - dependencies: - array-buffer-byte-length "^1.0.0" - arraybuffer.prototype.slice "^1.0.2" - available-typed-arrays "^1.0.5" - call-bind "^1.0.5" - es-set-tostringtag "^2.0.1" +es-abstract@^1.22.1, es-abstract@^1.22.3, es-abstract@^1.23.0: + version "1.23.5" + resolved "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.5.tgz" + integrity sha512-vlmniQ0WNPwXqA0BnmwV3Ng7HxiGlh6r5U6JcTMNx8OilcAGqVJBHJcPjqOMaczU9fRuRK5Px2BdVyPRnKMMVQ== + dependencies: + array-buffer-byte-length "^1.0.1" + arraybuffer.prototype.slice 
"^1.0.3" + available-typed-arrays "^1.0.7" + call-bind "^1.0.7" + data-view-buffer "^1.0.1" + data-view-byte-length "^1.0.1" + data-view-byte-offset "^1.0.0" + es-define-property "^1.0.0" + es-errors "^1.3.0" + es-object-atoms "^1.0.0" + es-set-tostringtag "^2.0.3" es-to-primitive "^1.2.1" function.prototype.name "^1.1.6" - get-intrinsic "^1.2.2" - get-symbol-description "^1.0.0" - globalthis "^1.0.3" + get-intrinsic "^1.2.4" + get-symbol-description "^1.0.2" + globalthis "^1.0.4" gopd "^1.0.1" - has-property-descriptors "^1.0.0" - has-proto "^1.0.1" + has-property-descriptors "^1.0.2" + has-proto "^1.0.3" has-symbols "^1.0.3" - hasown "^2.0.0" - internal-slot "^1.0.5" - is-array-buffer "^3.0.2" + hasown "^2.0.2" + internal-slot "^1.0.7" + is-array-buffer "^3.0.4" is-callable "^1.2.7" - is-negative-zero "^2.0.2" + is-data-view "^1.0.1" + is-negative-zero "^2.0.3" is-regex "^1.1.4" - is-shared-array-buffer "^1.0.2" + is-shared-array-buffer "^1.0.3" is-string "^1.0.7" - is-typed-array "^1.1.12" + is-typed-array "^1.1.13" is-weakref "^1.0.2" - object-inspect "^1.13.1" + object-inspect "^1.13.3" object-keys "^1.1.1" - object.assign "^4.1.4" - regexp.prototype.flags "^1.5.1" - safe-array-concat "^1.0.1" - safe-regex-test "^1.0.0" - string.prototype.trim "^1.2.8" - string.prototype.trimend "^1.0.7" - string.prototype.trimstart "^1.0.7" - typed-array-buffer "^1.0.0" - typed-array-byte-length "^1.0.0" - typed-array-byte-offset "^1.0.0" - typed-array-length "^1.0.4" + object.assign "^4.1.5" + regexp.prototype.flags "^1.5.3" + safe-array-concat "^1.1.2" + safe-regex-test "^1.0.3" + string.prototype.trim "^1.2.9" + string.prototype.trimend "^1.0.8" + string.prototype.trimstart "^1.0.8" + typed-array-buffer "^1.0.2" + typed-array-byte-length "^1.0.1" + typed-array-byte-offset "^1.0.2" + typed-array-length "^1.0.6" unbox-primitive "^1.0.2" - which-typed-array "^1.1.13" + which-typed-array "^1.1.15" -es-set-tostringtag@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" - integrity sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg== +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== dependencies: - get-intrinsic "^1.1.3" - has "^1.0.3" - has-tostringtag "^1.0.0" + get-intrinsic "^1.2.4" -es-shim-unscopables@^1.0.0: +es-errors@^1.2.1, es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + +es-object-atoms@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" - integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + resolved "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz" + integrity sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw== dependencies: - has "^1.0.3" + es-errors "^1.3.0" + +es-set-tostringtag@^2.0.3: + version "2.0.3" + resolved "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz" + integrity 
sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ== + dependencies: + get-intrinsic "^1.2.4" + has-tostringtag "^1.0.2" + hasown "^2.0.1" + +es-shim-unscopables@^1.0.0: + version "1.0.2" + resolved "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz" + integrity sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw== + dependencies: + hasown "^2.0.0" es-to-primitive@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + resolved "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" -escalade@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== +escalade@^3.1.1, escalade@^3.2.0: + version "3.2.0" + resolved "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== escape-string-regexp@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== escape-string-regexp@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz" integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== escape-string-regexp@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== eslint-config-prettier@9.1.0: version "9.1.0" - resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz#31af3d94578645966c082fcb71a5846d3c94867f" + resolved "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz" integrity sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw== eslint-import-resolver-node@^0.3.9: version "0.3.9" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz#d4eaac52b8a2e7c3cd1903eb00f7e053356118ac" + resolved "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz" integrity sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g== dependencies: debug "^3.2.7" @@ -3756,14 +4023,14 @@ eslint-import-resolver-node@^0.3.9: eslint-module-utils@^2.8.0: version "2.8.0" - resolved 
"https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz#e439fee65fc33f6bba630ff621efc38ec0375c49" + resolved "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz" integrity sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw== dependencies: debug "^3.2.7" eslint-plugin-import@^2.29.1: version "2.29.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz#d45b37b5ef5901d639c15270d74d46d161150643" + resolved "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz" integrity sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw== dependencies: array-includes "^3.1.7" @@ -3786,21 +4053,21 @@ eslint-plugin-import@^2.29.1: eslint-plugin-jest@27.6.0: version "27.6.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-27.6.0.tgz#e5c0cf735b3c8cad0ef9db5b565b2fc99f5e55ed" + resolved "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-27.6.0.tgz" integrity sha512-MTlusnnDMChbElsszJvrwD1dN3x6nZl//s4JD23BxB6MgR66TZlL064su24xEIS3VACfAoHV1vgyMgPw8nkdng== dependencies: "@typescript-eslint/utils" "^5.10.0" eslint-plugin-sort-destructure-keys@^1.4.0: version "1.5.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-sort-destructure-keys/-/eslint-plugin-sort-destructure-keys-1.5.0.tgz#dc45ff119b6886d4e72d3e0ff8a528af83b89388" + resolved "https://registry.npmjs.org/eslint-plugin-sort-destructure-keys/-/eslint-plugin-sort-destructure-keys-1.5.0.tgz" integrity sha512-xGLyqHtbFXZNXQSvAiQ4ISBYokrbUywEhmaA50fKtSKgceCv5y3zjoNuZwcnajdM6q29Nxj+oXC9KcqfMsAPrg== dependencies: natural-compare-lite "^1.4.0" eslint-scope@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz" integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== dependencies: esrecurse "^4.3.0" @@ -3808,25 +4075,20 @@ eslint-scope@^5.1.1: eslint-scope@^7.2.2: version "7.2.2" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" + resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz" integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== dependencies: esrecurse "^4.3.0" estraverse "^5.2.0" -eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz#c22c48f48942d08ca824cc526211ae400478a994" - integrity sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA== - -eslint-visitor-keys@^3.4.3: +eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: version "3.4.3" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" + resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz" integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== eslint@^8.56.0: version "8.56.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.56.0.tgz#4957ce8da409dc0809f99ab07a1b94832ab74b15" + resolved 
"https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz" integrity sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ== dependencies: "@eslint-community/eslint-utils" "^4.2.0" @@ -3870,7 +4132,7 @@ eslint@^8.56.0: espree@^9.6.0, espree@^9.6.1: version "9.6.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" + resolved "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz" integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== dependencies: acorn "^8.9.0" @@ -3879,61 +4141,61 @@ espree@^9.6.0, espree@^9.6.1: esprima@^4.0.0: version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + resolved "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esquery@^1.4.2: version "1.5.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" + resolved "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz" integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== dependencies: estraverse "^5.1.0" esrecurse@^4.3.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + resolved "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz" integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== dependencies: estraverse "^5.2.0" estraverse@^4.1.1: version "4.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== estraverse@^5.1.0, estraverse@^5.2.0: version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== esutils@^2.0.2: version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== event-target-shim@^5.0.0: version "5.0.1" - resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" + resolved "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== events@^3.0.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== eventsource@^1.0.7: version "1.1.2" - resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.1.2.tgz#bc75ae1c60209e7cb1541231980460343eaea7c2" + resolved 
"https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz" integrity sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA== eventsourcemock@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/eventsourcemock/-/eventsourcemock-2.0.0.tgz#83f66bc537e4909ef385bf84272e300737954ef0" + resolved "https://registry.npmjs.org/eventsourcemock/-/eventsourcemock-2.0.0.tgz" integrity sha512-tSmJnuE+h6A8/hLRg0usf1yL+Q8w01RQtmg0Uzgoxk/HIPZrIUeAr/A4es/8h1wNsoG8RdiESNQLTKiNwbSC3Q== execa@^5.0.0: version "5.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + resolved "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz" integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== dependencies: cross-spawn "^7.0.3" @@ -3948,23 +4210,12 @@ execa@^5.0.0: exit@^0.1.2: version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + resolved "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== -expect@^29.0.0: - version "29.5.0" - resolved "https://registry.yarnpkg.com/expect/-/expect-29.5.0.tgz#68c0509156cb2a0adb8865d413b137eeaae682f7" - integrity sha512-yM7xqUrCO2JdpFo4XpM82t+PJBFybdqoQuJLDGeDX2ij8NZzqRHyu3Hp188/JX7SWqud+7t4MUdvcgGBICMHZg== - dependencies: - "@jest/expect-utils" "^29.5.0" - jest-get-type "^29.4.3" - jest-matcher-utils "^29.5.0" - jest-message-util "^29.5.0" - jest-util "^29.5.0" - -expect@^29.7.0: +expect@^29.0.0, expect@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc" + resolved "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz" integrity sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw== dependencies: "@jest/expect-utils" "^29.7.0" @@ -3973,9 +4224,14 @@ expect@^29.7.0: jest-message-util "^29.7.0" jest-util "^29.7.0" +extend@^3.0.2: + version "3.0.2" + resolved "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + external-editor@^3.0.3: version "3.1.0" - resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" + resolved "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz" integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== dependencies: chardet "^0.7.0" @@ -3984,13 +4240,13 @@ external-editor@^3.0.3: fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== fast-glob@^3.2.9: - version "3.2.12" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" - integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + version "3.3.2" + resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz" + integrity 
sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -4000,59 +4256,66 @@ fast-glob@^3.2.9: fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== fast-levenshtein@^2.0.6: version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== fast-xml-parser@4.2.5: version "4.2.5" - resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz#a6747a09296a6cb34f2ae634019bf1738f3b421f" + resolved "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz" integrity sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g== dependencies: strnum "^1.0.5" +fast-xml-parser@^4.4.1: + version "4.5.0" + resolved "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.0.tgz" + integrity sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg== + dependencies: + strnum "^1.0.5" + fastq@^1.6.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" - integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== + version "1.17.1" + resolved "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz" + integrity sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w== dependencies: reusify "^1.0.4" fb-watchman@^2.0.0: version "2.0.2" - resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + resolved "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz" integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== dependencies: bser "2.1.1" figures@^3.0.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" + resolved "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz" integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== dependencies: escape-string-regexp "^1.0.5" file-entry-cache@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + resolved "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz" integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== dependencies: flat-cache "^3.0.4" -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity 
sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== +fill-range@^7.1.1: + version "7.1.1" + resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz" + integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== dependencies: to-regex-range "^5.0.1" find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== dependencies: locate-path "^5.0.0" @@ -4060,58 +4323,50 @@ find-up@^4.0.0, find-up@^4.1.0: find-up@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz" integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== dependencies: locate-path "^6.0.0" path-exists "^4.0.0" flat-cache@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + version "3.2.0" + resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz" + integrity sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw== dependencies: - flatted "^3.1.0" + flatted "^3.2.9" + keyv "^4.5.3" rimraf "^3.0.2" -flatted@^3.1.0: - version "3.2.7" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" - integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== +flatted@^3.2.9: + version "3.3.2" + resolved "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz" + integrity sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA== follow-redirects@^1.15.0: version "1.15.2" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz" integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== for-each@^0.3.3: version "0.3.3" - resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + resolved "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz" integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== dependencies: is-callable "^1.1.3" form-data@^2.5.0: version "2.5.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4" + resolved "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz" integrity sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" -form-data@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== - 
dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - form-data@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + resolved "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz" integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== dependencies: asynckit "^0.4.0" @@ -4120,27 +4375,22 @@ form-data@^4.0.0: fs.realpath@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== fsevents@^2.3.2, fsevents@~2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== - -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + version "2.3.3" + resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== function-bind@^1.1.2: version "1.1.2" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz" integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== function.prototype.name@^1.1.6: version "1.1.6" - resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.6.tgz#cdf315b7d90ee77a4c6ee216c3c3362da07533fd" + resolved "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz" integrity sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg== dependencies: call-bind "^1.0.2" @@ -4148,36 +4398,51 @@ function.prototype.name@^1.1.6: es-abstract "^1.22.1" functions-have-names "^1.2.3" +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz" + integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== + functions-have-names@^1.2.3: version "1.2.3" - resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + resolved "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz" integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== +gaxios@^6.0.0, gaxios@^6.0.2, gaxios@^6.1.1: + version "6.7.1" + resolved "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz" + integrity sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ== + dependencies: + extend "^3.0.2" + https-proxy-agent "^7.0.1" + is-stream "^2.0.0" + node-fetch "^2.6.9" + uuid "^9.0.1" + +gcp-metadata@^6.1.0: + version "6.1.0" + resolved 
"https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz" + integrity sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg== + dependencies: + gaxios "^6.0.0" + json-bigint "^1.0.0" + gensync@^1.0.0-beta.2: version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== get-caller-file@^2.0.5: version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== -get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.1.tgz#d295644fed4505fc9cde952c37ee12b477a83d82" - integrity sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw== - dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-proto "^1.0.1" - has-symbols "^1.0.3" - -get-intrinsic@^1.2.1, get-intrinsic@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.2.tgz#281b7622971123e1ef4b3c90fd7539306da93f3b" - integrity sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA== +get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.3, get-intrinsic@^1.2.4: + version "1.2.4" + resolved "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== dependencies: + es-errors "^1.3.0" function-bind "^1.1.2" has-proto "^1.0.1" has-symbols "^1.0.3" @@ -4185,39 +4450,40 @@ get-intrinsic@^1.2.1, get-intrinsic@^1.2.2: get-package-type@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + resolved "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== get-stream@^6.0.0: version "6.0.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== -get-symbol-description@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" - integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== +get-symbol-description@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz" + integrity sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg== dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.1" + call-bind "^1.0.5" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" glob-parent@^5.1.2, glob-parent@~5.1.2: version 
"5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" glob-parent@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== dependencies: is-glob "^4.0.3" glob@^7.1.3, glob@^7.1.4: version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== dependencies: fs.realpath "^1.0.0" @@ -4229,26 +4495,27 @@ glob@^7.1.3, glob@^7.1.4: globals@^11.1.0: version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^13.19.0: - version "13.20.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.20.0.tgz#ea276a1e508ffd4f1612888f9d1bad1e2717bf82" - integrity sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ== + version "13.24.0" + resolved "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz" + integrity sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ== dependencies: type-fest "^0.20.2" -globalthis@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" - integrity sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== +globalthis@^1.0.4: + version "1.0.4" + resolved "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz" + integrity sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ== dependencies: - define-properties "^1.1.3" + define-properties "^1.2.1" + gopd "^1.0.1" globby@^11.1.0: version "11.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== dependencies: array-union "^2.1.0" @@ -4258,94 +4525,132 @@ globby@^11.1.0: merge2 "^1.4.1" slash "^3.0.0" +google-auth-library@^9.3.0, google-auth-library@^9.6.3: + version "9.15.0" + resolved "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz" + integrity sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ== + dependencies: + base64-js "^1.3.0" + ecdsa-sig-formatter "^1.0.11" + gaxios "^6.1.1" + gcp-metadata "^6.1.0" + gtoken "^7.0.0" + jws "^4.0.0" + +google-gax@^4.3.3: + version "4.4.1" + resolved "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz" + integrity 
sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg== + dependencies: + "@grpc/grpc-js" "^1.10.9" + "@grpc/proto-loader" "^0.7.13" + "@types/long" "^4.0.0" + abort-controller "^3.0.0" + duplexify "^4.0.0" + google-auth-library "^9.3.0" + node-fetch "^2.7.0" + object-hash "^3.0.0" + proto3-json-serializer "^2.0.2" + protobufjs "^7.3.2" + retry-request "^7.0.0" + uuid "^9.0.1" + gopd@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + resolved "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz" integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== dependencies: get-intrinsic "^1.1.3" graceful-fs@^4.2.9: version "4.2.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== graphemer@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" + resolved "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== graphql@^16.8.1: version "16.8.1" - resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.8.1.tgz#1930a965bef1170603702acdb68aedd3f3cf6f07" + resolved "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz" integrity sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw== +gtoken@^7.0.0: + version "7.1.0" + resolved "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz" + integrity sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw== + dependencies: + gaxios "^6.0.0" + jws "^4.0.0" + has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + resolved "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz" integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - has-flag@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== -has-property-descriptors@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" - integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== +has-property-descriptors@^1.0.0, has-property-descriptors@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== 
dependencies: - get-intrinsic "^1.1.1" + es-define-property "^1.0.0" -has-proto@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" - integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== +has-proto@^1.0.1, has-proto@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz" + integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q== has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== -has-tostringtag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" - integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== - dependencies: - has-symbols "^1.0.2" - -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== +has-tostringtag@^1.0.0, has-tostringtag@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz" + integrity sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw== dependencies: - function-bind "^1.1.1" + has-symbols "^1.0.3" hasown@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" + resolved "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz" integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== dependencies: function-bind "^1.1.2" +hasown@^2.0.1, hasown@^2.0.2: + version "2.0.2" + resolved "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + headers-polyfill@^4.0.1: version "4.0.2" - resolved "https://registry.yarnpkg.com/headers-polyfill/-/headers-polyfill-4.0.2.tgz#9115a76eee3ce8fbf95b6e3c6bf82d936785b44a" + resolved "https://registry.npmjs.org/headers-polyfill/-/headers-polyfill-4.0.2.tgz" integrity sha512-EWGTfnTqAO2L/j5HZgoM/3z82L7necsJ0pO9Tp0X1wil3PDLrkypTBRgVO2ExehEEvUycejZD3FuRaXpZZc3kw== +html-entities@^2.5.2: + version "2.5.2" + resolved "https://registry.npmjs.org/html-entities/-/html-entities-2.5.2.tgz" + integrity sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA== + html-escaper@^2.0.0: version "2.0.2" - resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + resolved "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== http-proxy-agent@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz#5129800203520d434f142bc78ff3c170800f2b43" + resolved 
"https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz" integrity sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w== dependencies: "@tootallnate/once" "2" @@ -4354,68 +4659,71 @@ http-proxy-agent@^5.0.0: https-proxy-agent@^5.0.0: version "5.0.1" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + resolved "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz" integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== dependencies: agent-base "6" debug "4" +https-proxy-agent@^7.0.1: + version "7.0.5" + resolved "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz" + integrity sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw== + dependencies: + agent-base "^7.0.2" + debug "4" + human-signals@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz" integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== iconv-lite@^0.4.24: version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" ieee754@^1.1.13: version "1.2.1" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -ignore@^5.2.0: - version "5.2.4" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" - integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== - -ignore@^5.2.4: +ignore@^5.2.0, ignore@^5.2.4: version "5.3.0" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.0.tgz#67418ae40d34d6999c95ff56016759c718c82f78" + resolved "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz" integrity sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg== import-fresh@^3.2.1: version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== dependencies: parent-module "^1.0.0" resolve-from "^4.0.0" import-local@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" - integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + version "3.2.0" + resolved "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz" + integrity sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA== dependencies: pkg-dir "^4.2.0" resolve-cwd "^3.0.0" imurmurhash@^0.1.4: 
version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== indent-string@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== inflight@^1.0.4: version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== dependencies: once "^1.3.0" @@ -4423,12 +4731,12 @@ inflight@^1.0.4: inherits@2, inherits@^2.0.3, inherits@^2.0.4: version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inquirer@^8.2.0: version "8.2.5" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.5.tgz#d8654a7542c35a9b9e069d27e2df4858784d54f8" + resolved "https://registry.npmjs.org/inquirer/-/inquirer-8.2.5.tgz" integrity sha512-QAgPDQMEgrDssk1XiwwHoOGYF9BAbUcc1+j+FhEvaOt8/cKRqyLn0U5qA6F74fGhTMGxf92pOvPBeh29jQJDTQ== dependencies: ansi-escapes "^4.2.1" @@ -4447,46 +4755,45 @@ inquirer@^8.2.0: through "^2.3.6" wrap-ansi "^7.0.0" -internal-slot@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.5.tgz#f2a2ee21f668f8627a4667f309dc0f4fb6674986" - integrity sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ== +internal-slot@^1.0.7: + version "1.0.7" + resolved "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz" + integrity sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g== dependencies: - get-intrinsic "^1.2.0" - has "^1.0.3" + es-errors "^1.3.0" + hasown "^2.0.0" side-channel "^1.0.4" -is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" - integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== +is-array-buffer@^3.0.4: + version "3.0.4" + resolved "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz" + integrity sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw== dependencies: call-bind "^1.0.2" - get-intrinsic "^1.2.0" - is-typed-array "^1.1.10" + get-intrinsic "^1.2.1" is-arrayish@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + resolved "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== is-bigint@^1.0.1: version "1.0.4" - resolved 
"https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + resolved "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz" integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== dependencies: has-bigints "^1.0.1" is-binary-path@~2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-boolean-object@^1.1.0: version "1.1.2" - resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + resolved "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz" integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== dependencies: call-bind "^1.0.2" @@ -4494,178 +4801,167 @@ is-boolean-object@^1.1.0: is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: version "1.2.7" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz" integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== -is-core-module@^2.11.0: - version "2.12.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.12.1.tgz#0c0b6885b6f80011c71541ce15c8d66cf5a4f9fd" - integrity sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg== - dependencies: - has "^1.0.3" - is-core-module@^2.13.0, is-core-module@^2.13.1: version "2.13.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" + resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz" integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== dependencies: hasown "^2.0.0" +is-data-view@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz" + integrity sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w== + dependencies: + is-typed-array "^1.1.13" + is-date-object@^1.0.1: version "1.0.5" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + resolved "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz" integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== dependencies: has-tostringtag "^1.0.0" is-docker@^2.0.0, is-docker@^2.1.1: version "2.2.1" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + resolved "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz" integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== is-extglob@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" integrity 
sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== is-fullwidth-code-point@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz" integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== is-generator-fn@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + resolved "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz" integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: is-extglob "^2.1.1" is-interactive@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + resolved "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz" integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== -is-negative-zero@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" - integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== +is-negative-zero@^2.0.3: + version "2.0.3" + resolved "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz" + integrity sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw== is-node-process@^1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/is-node-process/-/is-node-process-1.2.0.tgz#ea02a1b90ddb3934a19aea414e88edef7e11d134" + resolved "https://registry.npmjs.org/is-node-process/-/is-node-process-1.2.0.tgz" integrity sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw== is-number-object@^1.0.4: version "1.0.7" - resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + resolved "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz" integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== dependencies: has-tostringtag "^1.0.0" is-number@^7.0.0: version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-inside@^3.0.3: version "3.0.3" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + resolved "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz" integrity 
sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== is-regex@^1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + resolved "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz" integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== dependencies: call-bind "^1.0.2" has-tostringtag "^1.0.0" -is-shared-array-buffer@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" - integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== +is-shared-array-buffer@^1.0.2, is-shared-array-buffer@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz" + integrity sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg== dependencies: - call-bind "^1.0.2" + call-bind "^1.0.7" is-stream@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz" integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" - resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + resolved "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz" integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== dependencies: has-tostringtag "^1.0.0" is-symbol@^1.0.2, is-symbol@^1.0.3: version "1.0.4" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + resolved "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz" integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== dependencies: has-symbols "^1.0.2" -is-typed-array@^1.1.10, is-typed-array@^1.1.9: - version "1.1.10" - resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.10.tgz#36a5b5cb4189b575d1a3e4b08536bfb485801e3f" - integrity sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A== - dependencies: - available-typed-arrays "^1.0.5" - call-bind "^1.0.2" - for-each "^0.3.3" - gopd "^1.0.1" - has-tostringtag "^1.0.0" - -is-typed-array@^1.1.12: - version "1.1.12" - resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" - integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== +is-typed-array@^1.1.13: + version "1.1.13" + resolved "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz" + integrity sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw== dependencies: - which-typed-array "^1.1.11" + which-typed-array "^1.1.14" is-unicode-supported@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + resolved "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz" integrity 
sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== is-weakref@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + resolved "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz" integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== dependencies: call-bind "^1.0.2" is-wsl@^2.2.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + resolved "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz" integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== dependencies: is-docker "^2.0.0" isarray@^2.0.5: version "2.0.5" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" + resolved "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz" integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== isexe@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" - integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + version "3.2.2" + resolved "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz" + integrity sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg== istanbul-lib-instrument@^5.0.4: version "5.2.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz" integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== dependencies: "@babel/core" "^7.12.3" @@ -4675,28 +4971,28 @@ istanbul-lib-instrument@^5.0.4: semver "^6.3.0" istanbul-lib-instrument@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.1.tgz#71e87707e8041428732518c6fb5211761753fbdf" - integrity sha512-EAMEJBsYuyyztxMxW3g7ugGPkrZsV57v0Hmv3mm1uQsmB+QnZuepg731CRaIgeUVSdmsTngOkSnauNF8p7FIhA== + version "6.0.3" + resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz" + integrity sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q== dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" + "@babel/core" "^7.23.9" + "@babel/parser" "^7.23.9" + "@istanbuljs/schema" "^0.1.3" istanbul-lib-coverage "^3.2.0" semver "^7.5.4" istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" - integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + version "3.0.1" + 
resolved "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz" + integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== dependencies: istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" + make-dir "^4.0.0" supports-color "^7.1.0" istanbul-lib-source-maps@^4.0.0: version "4.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + resolved "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz" integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== dependencies: debug "^4.1.1" @@ -4704,16 +5000,16 @@ istanbul-lib-source-maps@^4.0.0: source-map "^0.6.1" istanbul-reports@^3.1.3: - version "3.1.5" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" - integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== + version "3.1.7" + resolved "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz" + integrity sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g== dependencies: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" jest-changed-files@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.7.0.tgz#1c06d07e77c78e1585d020424dedc10d6e17ac3a" + resolved "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz" integrity sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w== dependencies: execa "^5.0.0" @@ -4722,7 +5018,7 @@ jest-changed-files@^29.7.0: jest-circus@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-29.7.0.tgz#b6817a45fcc835d8b16d5962d0c026473ee3668a" + resolved "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz" integrity sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw== dependencies: "@jest/environment" "^29.7.0" @@ -4748,7 +5044,7 @@ jest-circus@^29.7.0: jest-cli@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-29.7.0.tgz#5592c940798e0cae677eec169264f2d839a37995" + resolved "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz" integrity sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg== dependencies: "@jest/core" "^29.7.0" @@ -4765,7 +5061,7 @@ jest-cli@^29.7.0: jest-config@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-29.7.0.tgz#bcbda8806dbcc01b1e316a46bb74085a84b0245f" + resolved "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz" integrity sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ== dependencies: "@babel/core" "^7.11.6" @@ -4791,19 +5087,9 @@ jest-config@^29.7.0: slash "^3.0.0" strip-json-comments "^3.1.1" -jest-diff@^29.5.0: - version "29.5.0" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.5.0.tgz#e0d83a58eb5451dcc1fa61b1c3ee4e8f5a290d63" - integrity sha512-LtxijLLZBduXnHSniy0WMdaHjmQnt3g5sa16W4p0HqukYTTsyTW3GD1q41TyGl5YFXj/5B2U6dlh5FM1LIMgxw== - dependencies: - chalk "^4.0.0" - diff-sequences "^29.4.3" - jest-get-type "^29.4.3" - pretty-format "^29.5.0" - jest-diff@^29.7.0: version "29.7.0" - resolved 
"https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.7.0.tgz#017934a66ebb7ecf6f205e84699be10afd70458a" + resolved "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz" integrity sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw== dependencies: chalk "^4.0.0" @@ -4813,14 +5099,14 @@ jest-diff@^29.7.0: jest-docblock@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-29.7.0.tgz#8fddb6adc3cdc955c93e2a87f61cfd350d5d119a" + resolved "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz" integrity sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g== dependencies: detect-newline "^3.0.0" jest-each@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-29.7.0.tgz#162a9b3f2328bdd991beaabffbb74745e56577d1" + resolved "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz" integrity sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ== dependencies: "@jest/types" "^29.6.3" @@ -4831,7 +5117,7 @@ jest-each@^29.7.0: jest-environment-node@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-29.7.0.tgz#0b93e111dda8ec120bc8300e6d1fb9576e164376" + resolved "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz" integrity sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw== dependencies: "@jest/environment" "^29.7.0" @@ -4841,19 +5127,14 @@ jest-environment-node@^29.7.0: jest-mock "^29.7.0" jest-util "^29.7.0" -jest-get-type@^29.4.3: - version "29.4.3" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.4.3.tgz#1ab7a5207c995161100b5187159ca82dd48b3dd5" - integrity sha512-J5Xez4nRRMjk8emnTpWrlkyb9pfRQQanDrvWHhsR1+VUfbwxi30eVcZFlcdGInRibU4G5LwHXpI7IRHU0CY+gg== - jest-get-type@^29.6.3: version "29.6.3" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.6.3.tgz#36f499fdcea197c1045a127319c0481723908fd1" + resolved "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz" integrity sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw== jest-haste-map@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-29.7.0.tgz#3c2396524482f5a0506376e6c858c3bbcc17b104" + resolved "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz" integrity sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA== dependencies: "@jest/types" "^29.6.3" @@ -4872,25 +5153,15 @@ jest-haste-map@^29.7.0: jest-leak-detector@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz#5b7ec0dadfdfec0ca383dc9aa016d36b5ea4c728" + resolved "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz" integrity sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw== dependencies: jest-get-type "^29.6.3" pretty-format "^29.7.0" -jest-matcher-utils@^29.5.0: - version "29.5.0" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.5.0.tgz#d957af7f8c0692c5453666705621ad4abc2c59c5" - integrity sha512-lecRtgm/rjIK0CQ7LPQwzCs2VwW6WAahA55YBuI+xqmhm7LAaxokSB8C97yJeYyT+HvQkH741StzpU41wohhWw== - dependencies: - chalk "^4.0.0" - jest-diff "^29.5.0" - jest-get-type 
"^29.4.3" - pretty-format "^29.5.0" - jest-matcher-utils@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz#ae8fec79ff249fd592ce80e3ee474e83a6c44f12" + resolved "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz" integrity sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g== dependencies: chalk "^4.0.0" @@ -4898,24 +5169,9 @@ jest-matcher-utils@^29.7.0: jest-get-type "^29.6.3" pretty-format "^29.7.0" -jest-message-util@^29.5.0: - version "29.5.0" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.5.0.tgz#1f776cac3aca332ab8dd2e3b41625435085c900e" - integrity sha512-Kijeg9Dag6CKtIDA7O21zNTACqD5MD/8HfIV8pdD94vFyFuer52SigdC3IQMhab3vACxXMiFk+yMHNdbqtyTGA== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^29.5.0" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^29.5.0" - slash "^3.0.0" - stack-utils "^2.0.3" - jest-message-util@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.7.0.tgz#8bc392e204e95dfe7564abbe72a404e28e51f7f3" + resolved "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz" integrity sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w== dependencies: "@babel/code-frame" "^7.12.13" @@ -4930,7 +5186,7 @@ jest-message-util@^29.7.0: jest-mock-axios@^4.7.3: version "4.7.3" - resolved "https://registry.yarnpkg.com/jest-mock-axios/-/jest-mock-axios-4.7.3.tgz#8b93cf651eae46d21aa969729a746d819ba6b9c6" + resolved "https://registry.npmjs.org/jest-mock-axios/-/jest-mock-axios-4.7.3.tgz" integrity sha512-RHHdCZWreeX1EAl77u46yqYJG5aKX9l4zsCwf6wsIb3uy3w/XaEC5n4wbyluNujXQSZfNH1ir8OXinoewYQkUw== dependencies: "@jest/globals" "^29.7.0" @@ -4939,7 +5195,7 @@ jest-mock-axios@^4.7.3: jest-mock@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-29.7.0.tgz#4e836cf60e99c6fcfabe9f99d017f3fdd50a6347" + resolved "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz" integrity sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw== dependencies: "@jest/types" "^29.6.3" @@ -4948,7 +5204,7 @@ jest-mock@^29.7.0: jest-nock@^0.2.2: version "0.2.2" - resolved "https://registry.yarnpkg.com/jest-nock/-/jest-nock-0.2.2.tgz#81970e881928e9e9e36cdd07c843cd6ec2a8572c" + resolved "https://registry.npmjs.org/jest-nock/-/jest-nock-0.2.2.tgz" integrity sha512-8L1Jf41PYF9JUiQZGe1kmNmQwLodi2W+am09lALOSiJ7+zLnQOQfPTGU1/DJ2lcLBg4Ok17CvQrGR7BTUe89xQ== dependencies: eventsource "^1.0.7" @@ -4958,17 +5214,17 @@ jest-nock@^0.2.2: jest-pnp-resolver@^1.2.2: version "1.2.3" - resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz#930b1546164d4ad5937d5540e711d4d38d4cad2e" + resolved "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz" integrity sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w== jest-regex-util@^29.6.3: version "29.6.3" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-29.6.3.tgz#4a556d9c776af68e1c5f48194f4d0327d24e8a52" + resolved "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz" integrity sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg== jest-resolve-dependencies@^29.7.0: 
version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz#1b04f2c095f37fc776ff40803dc92921b1e88428" + resolved "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz" integrity sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA== dependencies: jest-regex-util "^29.6.3" @@ -4976,7 +5232,7 @@ jest-resolve-dependencies@^29.7.0: jest-resolve@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-29.7.0.tgz#64d6a8992dd26f635ab0c01e5eef4399c6bcbc30" + resolved "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz" integrity sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA== dependencies: chalk "^4.0.0" @@ -4991,7 +5247,7 @@ jest-resolve@^29.7.0: jest-runner@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-29.7.0.tgz#809af072d408a53dcfd2e849a4c976d3132f718e" + resolved "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz" integrity sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ== dependencies: "@jest/console" "^29.7.0" @@ -5018,7 +5274,7 @@ jest-runner@^29.7.0: jest-runtime@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-29.7.0.tgz#efecb3141cf7d3767a3a0cc8f7c9990587d3d817" + resolved "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz" integrity sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ== dependencies: "@jest/environment" "^29.7.0" @@ -5046,7 +5302,7 @@ jest-runtime@^29.7.0: jest-snapshot@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-29.7.0.tgz#c2c574c3f51865da1bb329036778a69bf88a6be5" + resolved "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz" integrity sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw== dependencies: "@babel/core" "^7.11.6" @@ -5070,21 +5326,9 @@ jest-snapshot@^29.7.0: pretty-format "^29.7.0" semver "^7.5.3" -jest-util@^29.0.0, jest-util@^29.5.0: - version "29.5.0" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.5.0.tgz#24a4d3d92fc39ce90425311b23c27a6e0ef16b8f" - integrity sha512-RYMgG/MTadOr5t8KdhejfvUU82MxsCu5MF6KuDUHl+NuwzUt+Sm6jJWxTJVrDR1j5M/gJVCPKQEpWXY+yIQ6lQ== - dependencies: - "@jest/types" "^29.5.0" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-util@^29.7.0: +jest-util@^29.0.0, jest-util@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.7.0.tgz#23c2b62bfb22be82b44de98055802ff3710fc0bc" + resolved "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz" integrity sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA== dependencies: "@jest/types" "^29.6.3" @@ -5096,7 +5340,7 @@ jest-util@^29.7.0: jest-validate@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-29.7.0.tgz#7bf705511c64da591d46b15fce41400d52147d9c" + resolved "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz" integrity sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw== dependencies: "@jest/types" "^29.6.3" @@ -5108,7 +5352,7 @@ jest-validate@^29.7.0: 
jest-watcher@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-29.7.0.tgz#7810d30d619c3a62093223ce6bb359ca1b28a2f2" + resolved "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz" integrity sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g== dependencies: "@jest/test-result" "^29.7.0" @@ -5122,7 +5366,7 @@ jest-watcher@^29.7.0: jest-worker@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.7.0.tgz#acad073acbbaeb7262bd5389e1bcf43e10058d4a" + resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz" integrity sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw== dependencies: "@types/node" "*" @@ -5130,9 +5374,9 @@ jest-worker@^29.7.0: merge-stream "^2.0.0" supports-color "^8.0.0" -jest@29.7.0, jest@~29.7.0: +jest@^29.7.0, jest@~29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest/-/jest-29.7.0.tgz#994676fc24177f088f1c5e3737f5697204ff2613" + resolved "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz" integrity sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw== dependencies: "@jest/core" "^29.7.0" @@ -5142,17 +5386,17 @@ jest@29.7.0, jest@~29.7.0: js-levenshtein@^1.1.6: version "1.1.6" - resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" + resolved "https://registry.npmjs.org/js-levenshtein/-/js-levenshtein-1.1.6.tgz" integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== js-tokens@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-yaml@^3.13.1: version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== dependencies: argparse "^1.0.7" @@ -5160,56 +5404,68 @@ js-yaml@^3.13.1: js-yaml@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== dependencies: argparse "^2.0.1" jsbi@^3.1.3: version "3.2.5" - resolved "https://registry.yarnpkg.com/jsbi/-/jsbi-3.2.5.tgz#b37bb90e0e5c2814c1c2a1bcd8c729888a2e37d6" + resolved "https://registry.npmjs.org/jsbi/-/jsbi-3.2.5.tgz" integrity sha512-aBE4n43IPvjaddScbvWRA2YlTzKEynHzu7MqOyTipdHucf/VxS63ViCjxYRg86M8Rxwbt/GfzHl1kKERkt45fQ== -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== +jsesc@^3.0.2: + version "3.0.2" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz" + integrity sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g== + +json-bigint@^1.0.0: + version "1.0.0" + 
resolved "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz" + integrity sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ== + dependencies: + bignumber.js "^9.0.0" + +json-buffer@3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz" + integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== json-parse-even-better-errors@^2.3.0: version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + resolved "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz" integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== json-schema-traverse@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + resolved "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz" integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== json-stringify-safe@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + resolved "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== json5@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + resolved "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz" integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== dependencies: minimist "^1.2.0" -json5@^2.2.2, json5@^2.2.3: +json5@^2.2.3: version "2.2.3" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== jsonwebtoken@^9.0.0: version "9.0.0" - resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz#d0faf9ba1cc3a56255fe49c0961a67e520c1926d" + resolved "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz" integrity sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw== dependencies: jws "^3.2.2" @@ -5217,9 +5473,25 @@ jsonwebtoken@^9.0.0: ms "^2.1.1" semver "^7.3.8" +jsonwebtoken@^9.0.2: + version "9.0.2" + resolved "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz" + integrity sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ== + dependencies: + jws "^3.2.2" + lodash.includes "^4.3.0" + lodash.isboolean "^3.0.3" + lodash.isinteger 
"^4.0.4" + lodash.isnumber "^3.0.3" + lodash.isplainobject "^4.0.6" + lodash.isstring "^4.0.1" + lodash.once "^4.0.0" + ms "^2.1.1" + semver "^7.5.4" + jwa@^1.4.1: version "1.4.1" - resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" + resolved "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz" integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== dependencies: buffer-equal-constant-time "1.0.1" @@ -5228,7 +5500,7 @@ jwa@^1.4.1: jwa@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/jwa/-/jwa-2.0.0.tgz#a7e9c3f29dae94027ebcaf49975c9345593410fc" + resolved "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz" integrity sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA== dependencies: buffer-equal-constant-time "1.0.1" @@ -5237,7 +5509,7 @@ jwa@^2.0.0: jws@^3.2.2: version "3.2.2" - resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" + resolved "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz" integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== dependencies: jwa "^1.4.1" @@ -5245,25 +5517,32 @@ jws@^3.2.2: jws@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/jws/-/jws-4.0.0.tgz#2d4e8cf6a318ffaa12615e9dec7e86e6c97310f4" + resolved "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz" integrity sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg== dependencies: jwa "^2.0.0" safe-buffer "^5.0.1" +keyv@^4.5.3: + version "4.5.4" + resolved "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz" + integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== + dependencies: + json-buffer "3.0.1" + kleur@^3.0.3: version "3.0.3" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== leven@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + resolved "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz" integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== levn@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + resolved "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz" integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== dependencies: prelude-ls "^1.2.1" @@ -5271,158 +5550,203 @@ levn@^0.4.1: lines-and-columns@^1.1.6: version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz" integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== locate-path@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz" integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== 
dependencies: p-locate "^4.1.0" locate-path@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz" integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== dependencies: p-locate "^5.0.0" +lodash.camelcase@^4.3.0: + version "4.3.0" + resolved "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz" + integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== + +lodash.includes@^4.3.0: + version "4.3.0" + resolved "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz" + integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w== + +lodash.isboolean@^3.0.3: + version "3.0.3" + resolved "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz" + integrity sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg== + +lodash.isinteger@^4.0.4: + version "4.0.4" + resolved "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz" + integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA== + +lodash.isnumber@^3.0.3: + version "3.0.3" + resolved "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz" + integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw== + +lodash.isplainobject@^4.0.6: + version "4.0.6" + resolved "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== + +lodash.isstring@^4.0.1: + version "4.0.1" + resolved "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz" + integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== + lodash.memoize@4.x: version "4.1.2" - resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + resolved "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz" integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== lodash.merge@^4.6.2: version "4.6.2" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + resolved "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== +lodash.once@^4.0.0: + version "4.1.1" + resolved "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz" + integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg== + lodash@^4.17.21: version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + resolved "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz" 
integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== dependencies: chalk "^4.1.0" is-unicode-supported "^0.1.0" -lru-cache@^10.0.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.1.0.tgz#2098d41c2dc56500e6c88584aa656c84de7d0484" - integrity sha512-/1clY/ui8CzjKFyjdvwPWJUYKiFVXG2I2cY0ssG7h4+hwk+XOIX7ZSG9Q7TW8TW3Kp3BUSqgFWBLgL4PJ+Blag== +long@^5.0.0: + version "5.2.3" + resolved "https://registry.npmjs.org/long/-/long-5.2.3.tgz" + integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q== lru-cache@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" lru-cache@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" -make-dir@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== +"lru-cache@npm:@wolfy1339/lru-cache@^11.0.2-patch.1": + version "11.0.2-patch.1" + resolved "https://registry.npmjs.org/@wolfy1339/lru-cache/-/lru-cache-11.0.2-patch.1.tgz" + integrity sha512-BgYZfL2ADCXKOw2wJtkM3slhHotawWkgIRRxq4wEybnZQPjvAp71SPX35xepMykTw8gXlzWcWPTY31hlbnRsDA== + +make-dir@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz" + integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== dependencies: - semver "^6.0.0" + semver "^7.5.3" make-error@1.x, make-error@^1.1.1: version "1.3.6" - resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + resolved "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== makeerror@1.0.12: version "1.0.12" - resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + resolved "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz" integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== dependencies: tmpl "1.0.5" merge-stream@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== micromatch@^4.0.4: - version 
"4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + version "4.0.8" + resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz" + integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== dependencies: - braces "^3.0.2" + braces "^3.0.3" picomatch "^2.3.1" mime-db@1.52.0: version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== mime-types@^2.1.12: version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: mime-db "1.52.0" +mime@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz" + integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A== + mimic-fn@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: brace-expansion "^1.1.7" minimist@^1.2.0, minimist@^1.2.6: version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== mkdirp@^0.5.1: version "0.5.6" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz" integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== dependencies: minimist "^1.2.6" mnemonist@0.38.3: version "0.38.3" - resolved "https://registry.yarnpkg.com/mnemonist/-/mnemonist-0.38.3.tgz#35ec79c1c1f4357cfda2fe264659c2775ccd7d9d" + resolved "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.3.tgz" integrity sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw== dependencies: obliterator "^1.6.1" -ms@2.1.2: +ms@2.1.2, ms@^2.1.1: version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@^2.1.1: - version "2.1.3" - resolved 
"https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - msw@^2.0.11: version "2.0.11" - resolved "https://registry.yarnpkg.com/msw/-/msw-2.0.11.tgz#f0a5878952a79bb5c0ef489bd01755170ed01adf" + resolved "https://registry.npmjs.org/msw/-/msw-2.0.11.tgz" integrity sha512-dAXFS2DxZX0uFqMPhS3oUAu8S/5IQ5qKKSwtXl3/dMTeML0C8JfSvbeWtowYg6pu4Iehgp5L/pHLrlIcG++y/A== dependencies: "@bundled-es-modules/cookie" "^2.0.0" @@ -5449,22 +5773,22 @@ msw@^2.0.11: mute-stream@0.0.8: version "0.0.8" - resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" + resolved "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz" integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== natural-compare-lite@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz#17b09581988979fddafe0201e931ba933c96cbb4" + resolved "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz" integrity sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g== natural-compare@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== ncc@^0.3.6: version "0.3.6" - resolved "https://registry.yarnpkg.com/ncc/-/ncc-0.3.6.tgz#11b47af973d7f2374af46bc0d8533366acc908eb" + resolved "https://registry.npmjs.org/ncc/-/ncc-0.3.6.tgz" integrity sha512-OXudTB2Ebt/FnOuDoPQbaa17+tdVqSOWA+gLfPxccWwsNED1uA2zEhpoB1hwdFC9yYbio/mdV5cvOtQI3Zrx1w== dependencies: mkdirp "^0.5.1" @@ -5472,19 +5796,9 @@ ncc@^0.3.6: tracer "^0.8.7" ws "^2.3.1" -nock@^13.0.4: - version "13.3.1" - resolved "https://registry.yarnpkg.com/nock/-/nock-13.3.1.tgz#f22d4d661f7a05ebd9368edae1b5dc0a62d758fc" - integrity sha512-vHnopocZuI93p2ccivFyGuUfzjq2fxNyNurp7816mlT5V5HF4SzXu8lvLrVzBbNqzs+ODooZ6OksuSUNM7Njkw== - dependencies: - debug "^4.1.0" - json-stringify-safe "^5.0.1" - lodash "^4.17.21" - propagate "^2.0.0" - -nock@^13.4.0: +nock@^13.0.4, nock@^13.4.0: version "13.4.0" - resolved "https://registry.yarnpkg.com/nock/-/nock-13.4.0.tgz#60aa3f7a4afa9c12052e74d8fb7550f682ef0115" + resolved "https://registry.npmjs.org/nock/-/nock-13.4.0.tgz" integrity sha512-W8NVHjO/LCTNA64yxAPHV/K47LpGYcVzgKd3Q0n6owhwvD0Dgoterc25R4rnZbckJEb6Loxz1f5QMuJpJnbSyQ== dependencies: debug "^4.1.0" @@ -5493,66 +5807,66 @@ nock@^13.4.0: node-abort-controller@^3.0.0: version "3.1.1" - resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.1.1.tgz#a94377e964a9a37ac3976d848cb5c765833b8548" + resolved "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz" integrity sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ== -node-fetch@^2.6.7: +node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0: version "2.7.0" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz" integrity 
sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== dependencies: whatwg-url "^5.0.0" node-int64@^0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + resolved "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz" integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== -node-releases@^2.0.12: - version "2.0.12" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.12.tgz#35627cc224a23bfb06fb3380f2b3afaaa7eb1039" - integrity sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ== +node-releases@^2.0.18: + version "2.0.18" + resolved "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== npm-run-path@^4.0.1: version "4.0.1" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz" integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== dependencies: path-key "^3.0.0" -object-inspect@^1.13.1: - version "1.13.1" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2" - integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ== +object-hash@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== -object-inspect@^1.9.0: - version "1.12.3" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" - integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== +object-inspect@^1.13.1, object-inspect@^1.13.3: + version "1.13.3" + resolved "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz" + integrity sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA== object-keys@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== -object.assign@^4.1.4: - version "4.1.4" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" - integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== +object.assign@^4.1.5: + version "4.1.5" + resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz" + integrity 
sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ== dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" + call-bind "^1.0.5" + define-properties "^1.2.1" has-symbols "^1.0.3" object-keys "^1.1.1" object.fromentries@^2.0.7: version "2.0.7" - resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.7.tgz#71e95f441e9a0ea6baf682ecaaf37fa2a8d7e616" + resolved "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz" integrity sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA== dependencies: call-bind "^1.0.2" @@ -5561,7 +5875,7 @@ object.fromentries@^2.0.7: object.groupby@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/object.groupby/-/object.groupby-1.0.1.tgz#d41d9f3c8d6c778d9cbac86b4ee9f5af103152ee" + resolved "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz" integrity sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ== dependencies: call-bind "^1.0.2" @@ -5571,7 +5885,7 @@ object.groupby@^1.0.1: object.values@^1.1.7: version "1.1.7" - resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.7.tgz#617ed13272e7e1071b43973aa1655d9291b8442a" + resolved "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz" integrity sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng== dependencies: call-bind "^1.0.2" @@ -5580,12 +5894,12 @@ object.values@^1.1.7: obliterator@^1.6.1: version "1.6.1" - resolved "https://registry.yarnpkg.com/obliterator/-/obliterator-1.6.1.tgz#dea03e8ab821f6c4d96a299e17aef6a3af994ef3" + resolved "https://registry.npmjs.org/obliterator/-/obliterator-1.6.1.tgz" integrity sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig== octokit@^3.1.2: version "3.1.2" - resolved "https://registry.yarnpkg.com/octokit/-/octokit-3.1.2.tgz#e574e4f2f5f8712e10412ce81fb56a74c93d4cfa" + resolved "https://registry.npmjs.org/octokit/-/octokit-3.1.2.tgz" integrity sha512-MG5qmrTL5y8KYwFgE1A4JWmgfQBaIETE/lOlfwNYx1QOtCQHGVxkRJmdUJltFc1HVn73d61TlMhMyNTOtMl+ng== dependencies: "@octokit/app" "^14.0.2" @@ -5601,21 +5915,21 @@ octokit@^3.1.2: once@^1.3.0, once@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== dependencies: wrappy "1" onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz" integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== dependencies: mimic-fn "^2.1.0" open@^8.0.0: version "8.4.2" - resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" + resolved "https://registry.npmjs.org/open/-/open-8.4.2.tgz" integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== dependencies: define-lazy-prop "^2.0.0" @@ -5624,7 +5938,7 @@ open@^8.0.0: optionator@^0.9.3: version "0.9.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64" + resolved 
"https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz" integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg== dependencies: "@aashutoshrathi/word-wrap" "^1.2.3" @@ -5636,7 +5950,7 @@ optionator@^0.9.3: ora@^5.4.1: version "5.4.1" - resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + resolved "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz" integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== dependencies: bl "^4.1.0" @@ -5651,57 +5965,57 @@ ora@^5.4.1: os-tmpdir@~1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + resolved "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz" integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g== outvariant@^1.2.1, outvariant@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/outvariant/-/outvariant-1.4.0.tgz#e742e4bda77692da3eca698ef5bfac62d9fba06e" - integrity sha512-AlWY719RF02ujitly7Kk/0QlV+pXGFDHrHf9O2OKqyqgBieaPOIeuSkL8sRK6j2WK+/ZAURq2kZsY0d8JapUiw== + version "1.4.3" + resolved "https://registry.npmjs.org/outvariant/-/outvariant-1.4.3.tgz" + integrity sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA== p-limit@^2.2.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== dependencies: p-try "^2.0.0" -p-limit@^3.0.2, p-limit@^3.1.0: +p-limit@^3.0.1, p-limit@^3.0.2, p-limit@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz" integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== dependencies: yocto-queue "^0.1.0" p-locate@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz" integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== dependencies: p-limit "^2.2.0" p-locate@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz" integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== dependencies: p-limit "^3.0.2" p-try@^2.0.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== parent-module@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== 
dependencies: callsites "^3.0.0" parse-json@^5.2.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + resolved "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz" integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== dependencies: "@babel/code-frame" "^7.0.0" @@ -5711,78 +6025,74 @@ parse-json@^5.2.0: path-exists@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== path-is-absolute@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== path-parse@^1.0.7: version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@^6.2.0: version "6.2.1" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.1.tgz#d54934d6798eb9e5ef14e7af7962c945906918e5" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.1.tgz" integrity sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw== path-type@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== -picocolors@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== +picocolors@^1.0.0, picocolors@^1.1.0: + version "1.1.1" + resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz" + integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== pirates@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" - integrity 
sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + version "4.0.6" + resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz" + integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== pkg-dir@^4.2.0: version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz" integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== dependencies: find-up "^4.0.0" +possible-typed-array-names@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz" + integrity sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q== + prelude-ls@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz" integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== prettier@3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.1.1.tgz#6ba9f23165d690b6cbdaa88cb0807278f7019848" + resolved "https://registry.npmjs.org/prettier/-/prettier-3.1.1.tgz" integrity sha512-22UbSzg8luF4UuZtzgiUOfcGM8s4tjBv6dJRT7j275NXsy2jb4aJa4NNveul5x4eqlF1wuhuR2RElK71RvmVaw== -pretty-format@^29.0.0, pretty-format@^29.5.0: - version "29.5.0" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.5.0.tgz#283134e74f70e2e3e7229336de0e4fce94ccde5a" - integrity sha512-V2mGkI31qdttvTFX7Mt4efOqHXqJWMu4/r66Xh3Z3BwZaPfPJgp6/gbwoujRpPUtfEF6AUUWx3Jim3GCw5g/Qw== - dependencies: - "@jest/schemas" "^29.4.3" - ansi-styles "^5.0.0" - react-is "^18.0.0" - -pretty-format@^29.7.0: +pretty-format@^29.0.0, pretty-format@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" + resolved "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz" integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== dependencies: "@jest/schemas" "^29.6.3" @@ -5791,17 +6101,12 @@ pretty-format@^29.7.0: priorityqueuejs@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/priorityqueuejs/-/priorityqueuejs-1.0.0.tgz#2ee4f23c2560913e08c07ce5ccdd6de3df2c5af8" + resolved "https://registry.npmjs.org/priorityqueuejs/-/priorityqueuejs-1.0.0.tgz" integrity sha512-lg++21mreCEOuGWTbO5DnJKAdxfjrdN0S9ysoW9SzdSJvbkWpkaDdpG/cdsPCsEnoLUwmd9m3WcZhngW7yKA2g== -process@^0.11.10: - version "0.11.10" - resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" - integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A== - prompts@^2.0.1: version "2.4.2" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz" integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== dependencies: kleur "^3.0.3" @@ -5809,37 +6114,62 @@ prompts@^2.0.1: propagate@^2.0.0: version "2.0.1" - resolved 
"https://registry.yarnpkg.com/propagate/-/propagate-2.0.1.tgz#40cdedab18085c792334e64f0ac17256d38f9a45" + resolved "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz" integrity sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag== +proto3-json-serializer@^2.0.2: + version "2.0.2" + resolved "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz" + integrity sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ== + dependencies: + protobufjs "^7.2.5" + +protobufjs@^7.2.5, protobufjs@^7.2.6, protobufjs@^7.3.2: + version "7.4.0" + resolved "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz" + integrity sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw== + dependencies: + "@protobufjs/aspromise" "^1.1.2" + "@protobufjs/base64" "^1.1.2" + "@protobufjs/codegen" "^2.0.4" + "@protobufjs/eventemitter" "^1.1.0" + "@protobufjs/fetch" "^1.1.0" + "@protobufjs/float" "^1.0.2" + "@protobufjs/inquire" "^1.1.0" + "@protobufjs/path" "^1.1.2" + "@protobufjs/pool" "^1.1.0" + "@protobufjs/utf8" "^1.1.0" + "@types/node" ">=13.7.0" + long "^5.0.0" + proxy-from-env@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + resolved "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz" integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== punycode@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" - integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== + version "2.3.1" + resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== pure-rand@^6.0.0: - version "6.0.2" - resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-6.0.2.tgz#a9c2ddcae9b68d736a8163036f088a2781c8b306" - integrity sha512-6Yg0ekpKICSjPswYOuC5sku/TSWaRYlA0qsXqJgM/d/4pLPHPuTxK7Nbf7jFKzAeedUhR8C7K9Uv63FBsSo8xQ== + version "6.1.0" + resolved "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz" + integrity sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA== queue-microtask@^1.2.2: version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== react-is@^18.0.0: - version "18.2.0" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" - integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + version "18.3.1" + resolved "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz" + integrity sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg== -readable-stream@^3.4.0: +readable-stream@^3.1.1, readable-stream@^3.4.0: version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + resolved 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz" integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== dependencies: inherits "^2.0.3" @@ -5848,59 +6178,51 @@ readable-stream@^3.4.0: readdirp@~3.6.0: version "3.6.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== dependencies: picomatch "^2.2.1" -regexp.prototype.flags@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz#90ce989138db209f81492edd734183ce99f9677e" - integrity sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg== +regexp.prototype.flags@^1.5.3: + version "1.5.3" + resolved "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.3.tgz" + integrity sha512-vqlC04+RQoFalODCbCumG2xIOvapzVMHwsyIGM/SIE8fRhFFsXeH8/QQ+s0T0kDAhKc4k30s73/0ydkHQz6HlQ== dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - set-function-name "^2.0.0" + call-bind "^1.0.7" + define-properties "^1.2.1" + es-errors "^1.3.0" + set-function-name "^2.0.2" require-directory@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + resolved "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz" integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== resolve-cwd@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + resolved "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz" integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== dependencies: resolve-from "^5.0.0" resolve-from@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== resolve-from@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== resolve.exports@^2.0.0: version "2.0.2" - resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-2.0.2.tgz#f8c934b8e6a13f539e38b7098e2e36134f01e800" + resolved "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.2.tgz" integrity sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg== -resolve@^1.20.0: - version "1.22.2" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.2.tgz#0ed0943d4e301867955766c9f3e1ae6d01c6845f" - integrity sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g== - dependencies: - is-core-module "^2.11.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -resolve@^1.22.4: +resolve@^1.20.0, 
resolve@^1.22.4: version "1.22.8" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz" integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== dependencies: is-core-module "^2.13.0" @@ -5909,176 +6231,192 @@ resolve@^1.22.4: restore-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" + resolved "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz" integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== dependencies: onetime "^5.1.0" signal-exit "^3.0.2" +retry-request@^7.0.0: + version "7.0.2" + resolved "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz" + integrity sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w== + dependencies: + "@types/request" "^2.48.8" + extend "^3.0.2" + teeny-request "^9.0.0" + +retry@0.13.1: + version "0.13.1" + resolved "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + reusify@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== rimraf@^2.6.1: version "2.7.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" rimraf@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz" integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== dependencies: glob "^7.1.3" run-async@^2.4.0: version "2.4.1" - resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" + resolved "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz" integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== run-parallel@^1.1.9: version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== dependencies: queue-microtask "^1.2.2" rxjs@^7.5.5: version "7.8.1" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" + resolved "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz" integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== dependencies: tslib "^2.1.0" -safe-array-concat@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.0.1.tgz#91686a63ce3adbea14d61b14c99572a8ff84754c" - 
integrity sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q== +safe-array-concat@^1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz" + integrity sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q== dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.2.1" + call-bind "^1.0.7" + get-intrinsic "^1.2.4" has-symbols "^1.0.3" isarray "^2.0.5" safe-buffer@^5.0.1, safe-buffer@~5.2.0: version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-buffer@~5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.0.1.tgz#d263ca54696cd8a306b5ca6551e92de57918fbe7" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.0.1.tgz" integrity sha512-cr7dZWLwOeaFBLTIuZeYdkfO7UzGIKhjYENJFAxUOMKWGaWDm2nJM2rzxNRm5Owu0DH3ApwNo6kx5idXZfb/Iw== -safe-regex-test@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" - integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== +safe-regex-test@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz" + integrity sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw== dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.3" + call-bind "^1.0.6" + es-errors "^1.3.0" is-regex "^1.1.4" "safer-buffer@>= 2.1.2 < 3": version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@>=0.6.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.3.0.tgz#a5dbe77db3be05c9d1ee7785dbd3ea9de51593d0" - integrity sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA== + version "1.4.1" + resolved "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz" + integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== semaphore@^1.0.5: version "1.1.0" - resolved "https://registry.yarnpkg.com/semaphore/-/semaphore-1.1.0.tgz#aaad8b86b20fe8e9b32b16dc2ee682a8cd26a8aa" + resolved "https://registry.npmjs.org/semaphore/-/semaphore-1.1.0.tgz" integrity sha512-O4OZEaNtkMd/K0i6js9SL+gqy0ZCBMgUvlSqHKi4IBdjhe7wB8pwztUk1BbZ1fmrvpwFrPbHzqd2w5pTcJH6LA== -semver@^6.0.0, semver@^6.1.0, semver@^6.3.0: +semver@^6.1.0, semver@^6.3.0: version "6.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== semver@^6.3.1: version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" integrity 
sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.7, semver@^7.3.8: - version "7.5.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.1.tgz#c90c4d631cf74720e46b21c1d37ea07edfab91ec" - integrity sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw== - dependencies: - lru-cache "^6.0.0" +semver@^7.3.7, semver@^7.3.8, semver@^7.5.3: + version "7.6.3" + resolved "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz" + integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== -semver@^7.5.3, semver@^7.5.4: +semver@^7.5.4: version "7.5.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" + resolved "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" -set-function-length@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.1.1.tgz#4bc39fafb0307224a33e106a7d35ca1218d659ed" - integrity sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ== +set-function-length@^1.2.1: + version "1.2.2" + resolved "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz" + integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== dependencies: - define-data-property "^1.1.1" - get-intrinsic "^1.2.1" + define-data-property "^1.1.4" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" gopd "^1.0.1" - has-property-descriptors "^1.0.0" + has-property-descriptors "^1.0.2" -set-function-name@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/set-function-name/-/set-function-name-2.0.1.tgz#12ce38b7954310b9f61faa12701620a0c882793a" - integrity sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA== +set-function-name@^2.0.2: + version "2.0.2" + resolved "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz" + integrity sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ== dependencies: - define-data-property "^1.0.1" + define-data-property "^1.1.4" + es-errors "^1.3.0" functions-have-names "^1.2.3" - has-property-descriptors "^1.0.0" + has-property-descriptors "^1.0.2" shebang-command@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== dependencies: shebang-regex "^3.0.0" shebang-regex@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== side-channel@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + 
version "1.0.6" + resolved "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz" + integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" + call-bind "^1.0.7" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + object-inspect "^1.13.1" signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== sisteransi@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + resolved "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz" integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== slash@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== source-map-support@0.5.13: version "0.5.13" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz" integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== dependencies: buffer-from "^1.0.0" @@ -6086,39 +6424,51 @@ source-map-support@0.5.13: source-map@^0.6.0, source-map@^0.6.1: version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== sprintf-js@~1.0.2: version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== stack-utils@^2.0.3: version "2.0.6" - resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" + resolved "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz" integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ== dependencies: escape-string-regexp "^2.0.0" statuses@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== stoppable@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/stoppable/-/stoppable-1.1.0.tgz#32da568e83ea488b08e4d7ea2c3bcc9d75015d5b" + resolved "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz" integrity 
sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw== +stream-events@^1.0.5: + version "1.0.5" + resolved "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz" + integrity sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg== + dependencies: + stubs "^3.0.0" + +stream-shift@^1.0.2: + version "1.0.3" + resolved "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz" + integrity sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ== + strict-event-emitter@^0.5.0, strict-event-emitter@^0.5.1: version "0.5.1" - resolved "https://registry.yarnpkg.com/strict-event-emitter/-/strict-event-emitter-0.5.1.tgz#1602ece81c51574ca39c6815e09f1a3e8550bd93" + resolved "https://registry.npmjs.org/strict-event-emitter/-/strict-event-emitter-0.5.1.tgz" integrity sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ== string-length@^4.0.1: version "4.0.2" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + resolved "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz" integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== dependencies: char-regex "^1.0.2" @@ -6126,113 +6476,123 @@ string-length@^4.0.1: string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== dependencies: emoji-regex "^8.0.0" is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string.prototype.trim@^1.2.8: - version "1.2.8" - resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz#f9ac6f8af4bd55ddfa8895e6aea92a96395393bd" - integrity sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ== +string.prototype.trim@^1.2.9: + version "1.2.9" + resolved "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz" + integrity sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw== dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.0" + es-object-atoms "^1.0.0" -string.prototype.trimend@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz#1bb3afc5008661d73e2dc015cd4853732d6c471e" - integrity sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA== +string.prototype.trimend@^1.0.8: + version "1.0.8" + resolved "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz" + integrity sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ== dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" + call-bind "^1.0.7" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" -string.prototype.trimstart@^1.0.7: - version "1.0.7" - resolved 
"https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz#d4cdb44b83a4737ffbac2d406e405d43d0184298" - integrity sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg== +string.prototype.trimstart@^1.0.8: + version "1.0.8" + resolved "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz" + integrity sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg== dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" + call-bind "^1.0.7" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" string_decoder@^1.1.1: version "1.3.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: ansi-regex "^5.0.1" strip-bom@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz" integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== strip-bom@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz" integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== strip-final-newline@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== strip-json-comments@^3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== strnum@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db" + resolved "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz" integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA== -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" +stubs@^3.0.0: + version "3.0.0" + resolved 
"https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz" + integrity sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw== supports-color@^7.1.0: version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== dependencies: has-flag "^4.0.0" supports-color@^8.0.0: version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== dependencies: has-flag "^4.0.0" supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== synchronous-promise@^2.0.17: version "2.0.17" - resolved "https://registry.yarnpkg.com/synchronous-promise/-/synchronous-promise-2.0.17.tgz#38901319632f946c982152586f2caf8ddc25c032" + resolved "https://registry.npmjs.org/synchronous-promise/-/synchronous-promise-2.0.17.tgz" integrity sha512-AsS729u2RHUfEra9xJrE39peJcc2stq2+poBXX8bcM08Y6g9j/i/PUzwNQqkaJde7Ntg1TO7bSREbR5sdosQ+g== +teeny-request@^9.0.0: + version "9.0.0" + resolved "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz" + integrity sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g== + dependencies: + http-proxy-agent "^5.0.0" + https-proxy-agent "^5.0.0" + node-fetch "^2.6.9" + stream-events "^1.0.5" + uuid "^9.0.0" + test-exclude@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + resolved "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz" integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== dependencies: "@istanbuljs/schema" "^0.1.2" @@ -6241,51 +6601,46 @@ test-exclude@^6.0.0: text-table@^0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz" integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== through@^2.3.6: version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz" integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== tinytim@0.1.1: version "0.1.1" - resolved "https://registry.yarnpkg.com/tinytim/-/tinytim-0.1.1.tgz#c968a1e5559ad9553224ef7627bab34e3caef8a8" + resolved "https://registry.npmjs.org/tinytim/-/tinytim-0.1.1.tgz" integrity sha512-NIpsp9lBIxPNzB++HnMmUd4byzJSVbbO4F+As1Gb1IG/YQT5QvmBDjpx8SpDS8fhGC+t+Qw8ldQgbcAIaU+2cA== tmp@^0.0.33: version "0.0.33" - resolved 
"https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + resolved "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz" integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" tmpl@1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + resolved "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz" integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== - to-regex-range@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" tr46@~0.0.3: version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz" integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== tracer@^0.8.7: version "0.8.15" - resolved "https://registry.yarnpkg.com/tracer/-/tracer-0.8.15.tgz#c4bb5b8c788ed3d7106c081288e22fd5a5abc474" + resolved "https://registry.npmjs.org/tracer/-/tracer-0.8.15.tgz" integrity sha512-ZQzlhd6zZFIpAhACiZkxLjl65XqVwi8t8UEBVGRIHAQN6nj55ftJWiFell+WSqWCP/vEycrIbUSuiyMwul+TFw== dependencies: colors "1.2.3" @@ -6295,12 +6650,12 @@ tracer@^0.8.7: ts-api-utils@^1.0.1: version "1.0.3" - resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.0.3.tgz#f12c1c781d04427313dbac808f453f050e54a331" + resolved "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.0.3.tgz" integrity sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg== -ts-jest@29.1.1: +ts-jest@^29.1.1: version "29.1.1" - resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-29.1.1.tgz#f58fe62c63caf7bfcc5cc6472082f79180f0815b" + resolved "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.1.tgz" integrity sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA== dependencies: bs-logger "0.x" @@ -6314,7 +6669,7 @@ ts-jest@29.1.1: ts-node@^10.9.2: version "10.9.2" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f" + resolved "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz" integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ== dependencies: "@cspotcode/source-map-support" "^0.8.0" @@ -6333,7 +6688,7 @@ ts-node@^10.9.2: tsconfig-paths@^3.15.0: version "3.15.0" - resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4" + resolved "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz" integrity sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg== dependencies: "@types/json5" "^0.0.29" @@ -6343,96 +6698,96 @@ 
tsconfig-paths@^3.15.0: tslib@^1.10.0, tslib@^1.11.1, tslib@^1.8.1: version "1.14.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + resolved "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.1.0, tslib@^2.3.1, tslib@^2.5.0: - version "2.5.3" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.3.tgz#24944ba2d990940e6e982c4bea147aba80209913" - integrity sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w== - -tslib@^2.2.0, tslib@^2.6.2: - version "2.6.2" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" - integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== +tslib@^2.1.0, tslib@^2.2.0, tslib@^2.3.1, tslib@^2.5.0, tslib@^2.6.2: + version "2.8.1" + resolved "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz" + integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== tsutils@^3.21.0: version "3.21.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + resolved "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz" integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== dependencies: tslib "^1.8.1" tunnel@0.0.6, tunnel@^0.0.6: version "0.0.6" - resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c" + resolved "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz" integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg== type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + resolved "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz" integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== dependencies: prelude-ls "^1.2.1" type-detect@4.0.8: version "4.0.8" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + resolved "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== type-fest@^0.20.2: version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== type-fest@^0.21.3: version "0.21.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz" integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== type-fest@^2.19.0: version "2.19.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz" integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA== 
-typed-array-buffer@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz#18de3e7ed7974b0a729d3feecb94338d1472cd60" - integrity sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw== +typed-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz" + integrity sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ== dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.2.1" - is-typed-array "^1.1.10" + call-bind "^1.0.7" + es-errors "^1.3.0" + is-typed-array "^1.1.13" -typed-array-byte-length@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz#d787a24a995711611fb2b87a4052799517b230d0" - integrity sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA== +typed-array-byte-length@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz" + integrity sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw== dependencies: - call-bind "^1.0.2" + call-bind "^1.0.7" for-each "^0.3.3" - has-proto "^1.0.1" - is-typed-array "^1.1.10" + gopd "^1.0.1" + has-proto "^1.0.3" + is-typed-array "^1.1.13" -typed-array-byte-offset@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz#cbbe89b51fdef9cd6aaf07ad4707340abbc4ea0b" - integrity sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg== +typed-array-byte-offset@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz" + integrity sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA== dependencies: - available-typed-arrays "^1.0.5" - call-bind "^1.0.2" + available-typed-arrays "^1.0.7" + call-bind "^1.0.7" for-each "^0.3.3" - has-proto "^1.0.1" - is-typed-array "^1.1.10" + gopd "^1.0.1" + has-proto "^1.0.3" + is-typed-array "^1.1.13" -typed-array-length@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" - integrity sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng== +typed-array-length@^1.0.6: + version "1.0.6" + resolved "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz" + integrity sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g== dependencies: - call-bind "^1.0.2" + call-bind "^1.0.7" for-each "^0.3.3" - is-typed-array "^1.1.9" + gopd "^1.0.1" + has-proto "^1.0.3" + is-typed-array "^1.1.13" + possible-typed-array-names "^1.0.0" typescript@5.3.3: version "5.3.3" @@ -6441,12 +6796,12 @@ typescript@5.3.3: ultron@~1.1.0: version "1.1.1" - resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" + resolved "https://registry.npmjs.org/ultron/-/ultron-1.1.1.tgz" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== unbox-primitive@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + resolved 
"https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz" integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== dependencies: call-bind "^1.0.2" @@ -6456,100 +6811,100 @@ unbox-primitive@^1.0.2: undici-types@~5.26.4: version "5.26.5" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" + resolved "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== undici@^5.25.4: - version "5.28.2" - resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.2.tgz#fea200eac65fc7ecaff80a023d1a0543423b4c91" - integrity sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w== + version "5.28.4" + resolved "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz" + integrity sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g== dependencies: "@fastify/busboy" "^2.0.0" -universal-github-app-jwt@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/universal-github-app-jwt/-/universal-github-app-jwt-1.1.1.tgz#d57cee49020662a95ca750a057e758a1a7190e6e" - integrity sha512-G33RTLrIBMFmlDV4u4CBF7dh71eWwykck4XgaxaIVeZKOYZRAAxvcGMRFTUclVY6xoUPQvO4Ne5wKGxYm/Yy9w== +universal-github-app-jwt@^1.1.2: + version "1.2.0" + resolved "https://registry.npmjs.org/universal-github-app-jwt/-/universal-github-app-jwt-1.2.0.tgz" + integrity sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g== dependencies: "@types/jsonwebtoken" "^9.0.0" - jsonwebtoken "^9.0.0" + jsonwebtoken "^9.0.2" universal-user-agent@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.0.tgz#3381f8503b251c0d9cd21bc1de939ec9df5480ee" + resolved "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz" integrity sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w== -update-browserslist-db@^1.0.11: - version "1.0.11" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz#9a2a641ad2907ae7b3616506f4b977851db5b940" - integrity sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA== +update-browserslist-db@^1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz" + integrity sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A== dependencies: - escalade "^3.1.1" - picocolors "^1.0.0" + escalade "^3.2.0" + picocolors "^1.1.0" uri-js@^4.2.2: version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz" integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== dependencies: punycode "^2.1.0" util-deprecate@^1.0.1: version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== uuid@^3.3.2, uuid@^3.3.3: version "3.4.0" - resolved 
"https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + resolved "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -uuid@^8.3.0, uuid@^8.3.2: +uuid@^8.0.0, uuid@^8.3.0, uuid@^8.3.2: version "8.3.2" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== -uuid@^9.0.1: +uuid@^9.0.0, uuid@^9.0.1: version "9.0.1" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" + resolved "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz" integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== v8-compile-cache-lib@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + resolved "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz" integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== v8-to-istanbul@^9.0.1: - version "9.1.0" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz#1b83ed4e397f58c85c266a570fc2558b5feb9265" - integrity sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA== + version "9.3.0" + resolved "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz" + integrity sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA== dependencies: "@jridgewell/trace-mapping" "^0.3.12" "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" + convert-source-map "^2.0.0" walker@^1.0.8: version "1.0.8" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + resolved "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz" integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== dependencies: makeerror "1.0.12" wcwidth@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + resolved "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz" integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== dependencies: defaults "^1.0.3" webidl-conversions@^3.0.0: version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz" integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== whatwg-url@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz" integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== dependencies: tr46 "~0.0.3" @@ -6557,7 +6912,7 @@ whatwg-url@^5.0.0: which-boxed-primitive@^1.0.2: version "1.0.2" - resolved 
"https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + resolved "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz" integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== dependencies: is-bigint "^1.0.1" @@ -6566,27 +6921,27 @@ which-boxed-primitive@^1.0.2: is-string "^1.0.5" is-symbol "^1.0.3" -which-typed-array@^1.1.11, which-typed-array@^1.1.13: - version "1.1.13" - resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.13.tgz#870cd5be06ddb616f504e7b039c4c24898184d36" - integrity sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow== +which-typed-array@^1.1.14, which-typed-array@^1.1.15: + version "1.1.15" + resolved "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz" + integrity sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA== dependencies: - available-typed-arrays "^1.0.5" - call-bind "^1.0.4" + available-typed-arrays "^1.0.7" + call-bind "^1.0.7" for-each "^0.3.3" gopd "^1.0.1" - has-tostringtag "^1.0.0" + has-tostringtag "^1.0.2" which@^2.0.1: version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" wrap-ansi@^7.0.0: version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== dependencies: ansi-styles "^4.0.0" @@ -6595,12 +6950,12 @@ wrap-ansi@^7.0.0: wrappy@1: version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== write-file-atomic@^4.0.2: version "4.0.2" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" + resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz" integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== dependencies: imurmurhash "^0.1.4" @@ -6608,7 +6963,7 @@ write-file-atomic@^4.0.2: ws@^2.3.1: version "2.3.1" - resolved "https://registry.yarnpkg.com/ws/-/ws-2.3.1.tgz#6b94b3e447cb6a363f785eaf94af6359e8e81c80" + resolved "https://registry.npmjs.org/ws/-/ws-2.3.1.tgz" integrity sha512-61a+9LgtYZxTq1hAonhX8Xwpo2riK4IOR/BIVxioFbCfc3QFKmpE4x9dLExfLHKtUfVZigYa36tThVhO57erEw== dependencies: safe-buffer "~5.0.1" @@ -6616,7 +6971,7 @@ ws@^2.3.1: xml2js@^0.5.0: version "0.5.0" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7" + resolved "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz" integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA== dependencies: sax ">=0.6.0" @@ -6624,32 +6979,32 @@ xml2js@^0.5.0: xmlbuilder@~11.0.0: version "11.0.1" - resolved 
"https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" + resolved "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz" integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== y18n@^5.0.5: version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + resolved "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz" integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== yallist@^3.0.2: version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + resolved "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yallist@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== yargs-parser@^21.0.1, yargs-parser@^21.1.1: version "21.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz" integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== -yargs@^17.3.1: +yargs@^17.3.1, yargs@^17.7.2: version "17.7.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + resolved "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== dependencies: cliui "^8.0.1" @@ -6662,10 +7017,10 @@ yargs@^17.3.1: yn@3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + resolved "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== yocto-queue@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==